import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import zipfile
import cv2
from skimage import io
import tensorflow as tf
from tensorflow.python.keras import Sequential
from tensorflow.keras import layers, optimizers
from tensorflow.keras.layers import *
from tensorflow.keras.models import Model
from tensorflow.keras.initializers import glorot_uniform
from tensorflow.keras.utils import plot_model
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, LearningRateScheduler
import tensorflow.keras.backend as K
import random
import glob
from sklearn.preprocessing import StandardScaler, normalize
from IPython.display import display
data = pd.read_csv('../desktop/lgg-mri-segmentation/kaggle_3m/data.csv')
data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 110 entries, 0 to 109 Data columns (total 18 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Patient 110 non-null object 1 RNASeqCluster 92 non-null float64 2 MethylationCluster 109 non-null float64 3 miRNACluster 110 non-null int64 4 CNCluster 108 non-null float64 5 RPPACluster 98 non-null float64 6 OncosignCluster 105 non-null float64 7 COCCluster 110 non-null int64 8 histological_type 109 non-null float64 9 neoplasm_histologic_grade 109 non-null float64 10 tumor_tissue_site 109 non-null float64 11 laterality 109 non-null float64 12 tumor_location 109 non-null float64 13 gender 109 non-null float64 14 age_at_initial_pathologic 109 non-null float64 15 race 108 non-null float64 16 ethnicity 102 non-null float64 17 death01 109 non-null float64 dtypes: float64(15), int64(2), object(1) memory usage: 15.6+ KB
# Build a flat interleaved list for every file in the dataset: even indices
# hold the patient directory name, odd indices hold the matching file path.
data_map = []
for sub_dir_path in glob.glob("../desktop/lgg-mri-segmentation/kaggle_3m/" + "*"):
    # Skip loose files such as data.csv / README.md explicitly instead of
    # letting os.listdir() raise (the original printed WinError 267 for them).
    if not os.path.isdir(sub_dir_path):
        continue
    # os.path.basename handles both '/' and '\' separators. The original
    # sub_dir_path.split('/')[-1] left 'kaggle_3m\TCGA_...' in patient_id
    # on Windows because glob returns backslash-separated paths there.
    dir_name = os.path.basename(sub_dir_path)
    for filename in os.listdir(sub_dir_path):
        image_path = os.path.join(sub_dir_path, filename)
        data_map.extend([dir_name, image_path])
[WinError 267] The directory name is invalid: '../desktop/lgg-mri-segmentation/kaggle_3m\\data.csv' [WinError 267] The directory name is invalid: '../desktop/lgg-mri-segmentation/kaggle_3m\\README.md'
# Pair up the interleaved entries collected above: even slots are patient
# ids, odd slots are the corresponding file paths.
patient_ids = data_map[0::2]
file_paths = data_map[1::2]
df = pd.DataFrame({"patient_id": patient_ids,
                   "path": file_paths})
df.head()
| patient_id | path | |
|---|---|---|
| 0 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... |
| 1 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... |
| 2 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... |
| 3 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... |
| 4 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... |
# Separate MRI slices from their segmentation masks: mask files carry
# "mask" in the filename, image files do not.
is_mask = df['path'].str.contains("mask")
df_imgs = df[~is_mask]
df_masks = df[is_mask]
# File path line length images for later sorting
# Fixed character offsets used to slice the slice-number out of each path
# (path[BASE_LEN:-END_IMG_LEN] for images, path[BASE_LEN:-END_MASK_LEN]
# for masks). NOTE(review): the inline comments show the Kaggle path, whose
# prefix is 89 chars (see z == 3 below); 86 appears to match the local
# '../desktop/...' prefix length instead — confirm before reuse.
BASE_LEN = 86 # len(/kaggle/input/lgg-mri-segmentation/kaggle_3m/TCGA_DU_6404_19850629/TCGA_DU_6404_19850629_ <-!!!43.tif)
END_IMG_LEN = 4 # len(/kaggle/input/lgg-mri-segmentation/kaggle_3m/TCGA_DU_6404_19850629/TCGA_DU_6404_19850629_43 !!!->.tif)
END_MASK_LEN = 9 # (/kaggle/input/lgg-mri-segmentation/kaggle_3m/TCGA_DU_6404_19850629/TCGA_DU_6404_19850629_43 !!!->_mask.tif)
# Exploratory check: how much longer is the Kaggle path prefix than the
# local one? x - y == 3, i.e. the local prefix is 3 characters shorter.
x = len('/kaggle/input/lgg-mri-segmentation/kaggle_3m/TCGA_DU_6404_19850629/TCGA_DU_6404_19850629_ <-!!!43.tif')
y = len('../desktop/lgg-mri-segmentation/kaggle_3m/TCGA_DU_6404_19850629/TCGA_DU_6404_19850629_ <-!!!43.tif')
z = x-y
z
3
# Data sorting: order image and mask paths by their trailing slice number so
# imgs[i] and masks[i] refer to the same MRI slice.
import re

def _slice_number(path):
    """Return the integer slice index from '..._<n>.tif' or '..._<n>_mask.tif'.

    Replaces the brittle fixed-offset slicing (path[BASE_LEN:-END_IMG_LEN]),
    which silently breaks whenever the dataset root path length changes.
    """
    match = re.search(r'_(\d+)(?:_mask)?\.tif$', path)
    if match is None:
        raise ValueError(f"Unexpected file name format: {path}")
    return int(match.group(1))

imgs = sorted(df_imgs["path"].values, key=_slice_number)
masks = sorted(df_masks["path"].values, key=_slice_number)
# Sorting check: a random image and its mask should share the same prefix
# and slice number.
idx = random.randint(0, len(imgs)-1)
print("Path to the Image:", imgs[idx], "\nPath to the Mask:", masks[idx])
Path to the Image: ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA_DU_5872_19950223/TCGA_DU_5872_19950223_61.tif Path to the Mask: ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA_DU_5872_19950223/TCGA_DU_5872_19950223_61_mask.tif
# Final dataframe: one row per slice, pairing the patient id with its
# image path and mask path (imgs/masks are aligned by the sort above).
columns = {
    "patient_id": df_imgs.patient_id.values,
    "image_path": imgs,
    "mask_path": masks,
}
brain_df = pd.DataFrame(columns)
def pos_neg_diagnosis(mask_path):
    """Return 1 if the mask at *mask_path* contains any non-zero pixel, else 0.

    A non-zero pixel anywhere in the mask means the slice is tumor-positive.

    Raises:
        FileNotFoundError: if the image cannot be read. cv2.imread returns
            None instead of raising, which previously surfaced as an opaque
            np.max(None) failure far from the real cause.
    """
    mask = cv2.imread(mask_path)
    if mask is None:
        raise FileNotFoundError(f"Could not read mask image: {mask_path}")
    return int(np.max(mask) > 0)
# Label each slice: 1 if its mask contains tumor pixels, 0 otherwise.
# Pass the function directly -- the lambda wrapper added nothing.
brain_df['mask'] = brain_df['mask_path'].apply(pos_neg_diagnosis)
brain_df
| patient_id | image_path | mask_path | mask | |
|---|---|---|---|---|
| 0 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 1 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 2 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 3 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 4 | kaggle_3m\TCGA_CS_4941_19960909 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| ... | ... | ... | ... | ... |
| 3924 | kaggle_3m\TCGA_HT_A61B_19991127 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 3925 | kaggle_3m\TCGA_HT_A61B_19991127 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 3926 | kaggle_3m\TCGA_HT_A61B_19991127 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 3927 | kaggle_3m\TCGA_HT_A61B_19991127 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
| 3928 | kaggle_3m\TCGA_HT_A61B_19991127 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 |
3929 rows × 4 columns
# Visualize the first 12 tumor-positive slices: raw MRI, its mask, and the
# MRI with the mask region recolored green.
max_rows = 12
fig, axs = plt.subplots(max_rows, 3, figsize=(20, 50))
shown = 0
for row_idx, has_tumor in enumerate(brain_df['mask']):
    if has_tumor != 1:
        continue
    mri = io.imread(brain_df.image_path[row_idx])
    axs[shown][0].title.set_text("Brain MRI")
    axs[shown][0].imshow(mri)
    mask_img = io.imread(brain_df.mask_path[row_idx])
    axs[shown][1].title.set_text("Mask")
    axs[shown][1].imshow(mask_img, cmap='gray')
    mri[mask_img == 255] = (0, 255, 150)  # change pixel color at the position of mask
    axs[shown][2].title.set_text("MRI with Mask")
    axs[shown][2].imshow(mri)
    shown += 1
    if shown == max_rows:
        break
fig.tight_layout()
# Drop patient_id (not a model input) for the training frame.
brain_df_train = brain_df.drop(columns=['patient_id'])
# Convert the data in mask column to string format, to use categorical mode in flow_from_dataframe
brain_df_train['mask'] = brain_df_train['mask'].map(str)
brain_df_train.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 3929 entries, 0 to 3928 Data columns (total 3 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 image_path 3929 non-null object 1 mask_path 3929 non-null object 2 mask 3929 non-null object dtypes: object(3) memory usage: 92.2+ KB
from sklearn.model_selection import train_test_split
# Hold out 15% of the slices for final evaluation.
# NOTE(review): no random_state is set, so the split differs on every run,
# and no stratify= is given, so the positive/negative ratio may drift
# between train and test -- consider
# train_test_split(..., random_state=..., stratify=brain_df_train['mask']).
train, test = train_test_split(brain_df_train, test_size=0.15)
from keras_preprocessing.image import ImageDataGenerator

def _flow(generator, frame, **kwargs):
    """Shared flow_from_dataframe settings for the three data splits.

    Factors out the column names, batch size, class mode and target size
    that were duplicated verbatim across the three generator definitions.
    """
    return generator.flow_from_dataframe(
        frame,
        directory='./',
        x_col='image_path',
        y_col='mask',
        class_mode='categorical',
        batch_size=16,
        target_size=(256, 256),
        **kwargs,
    )

# Train/validation generators: pixels rescaled to [0, 1], 10% of the
# training frame reserved as the validation subset.
datagen = ImageDataGenerator(rescale=1./255., validation_split=0.1)
train_generator = _flow(datagen, train, subset='training', shuffle=True)
valid_generator = _flow(datagen, train, subset='validation', shuffle=True)

# Test generator: same rescaling, no shuffling so that predictions stay
# aligned with the rows of `test`.
test_datagen = ImageDataGenerator(rescale=1./255.)
test_generator = _flow(test_datagen, test, shuffle=False)
Found 3006 validated image filenames belonging to 2 classes. Found 333 validated image filenames belonging to 2 classes. Found 590 validated image filenames belonging to 2 classes.
from tensorflow.keras.applications.resnet50 import ResNet50
# ImageNet-pretrained ResNet50 backbone without its classification head
# (include_top=False), taking 256x256 RGB slices as input; a custom
# 2-class head is attached to clf_model.output further down.
clf_model = ResNet50(weights='imagenet', include_top=False, input_tensor=Input(shape=(256,256,3)))
clf_model.summary()
Model: "resnet50"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 256, 256, 3 0 []
)]
conv1_pad (ZeroPadding2D) (None, 262, 262, 3) 0 ['input_1[0][0]']
conv1_conv (Conv2D) (None, 128, 128, 64 9472 ['conv1_pad[0][0]']
)
conv1_bn (BatchNormalization) (None, 128, 128, 64 256 ['conv1_conv[0][0]']
)
conv1_relu (Activation) (None, 128, 128, 64 0 ['conv1_bn[0][0]']
)
pool1_pad (ZeroPadding2D) (None, 130, 130, 64 0 ['conv1_relu[0][0]']
)
pool1_pool (MaxPooling2D) (None, 64, 64, 64) 0 ['pool1_pad[0][0]']
conv2_block1_1_conv (Conv2D) (None, 64, 64, 64) 4160 ['pool1_pool[0][0]']
conv2_block1_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_1_conv[0][0]']
ization)
conv2_block1_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_1_bn[0][0]']
n)
conv2_block1_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block1_1_relu[0][0]']
conv2_block1_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_2_conv[0][0]']
ization)
conv2_block1_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_2_bn[0][0]']
n)
conv2_block1_0_conv (Conv2D) (None, 64, 64, 256) 16640 ['pool1_pool[0][0]']
conv2_block1_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block1_2_relu[0][0]']
conv2_block1_0_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_0_conv[0][0]']
ization)
conv2_block1_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_3_conv[0][0]']
ization)
conv2_block1_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_0_bn[0][0]',
'conv2_block1_3_bn[0][0]']
conv2_block1_out (Activation) (None, 64, 64, 256) 0 ['conv2_block1_add[0][0]']
conv2_block2_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block1_out[0][0]']
conv2_block2_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_1_conv[0][0]']
ization)
conv2_block2_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_1_bn[0][0]']
n)
conv2_block2_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block2_1_relu[0][0]']
conv2_block2_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_2_conv[0][0]']
ization)
conv2_block2_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_2_bn[0][0]']
n)
conv2_block2_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block2_2_relu[0][0]']
conv2_block2_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block2_3_conv[0][0]']
ization)
conv2_block2_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_out[0][0]',
'conv2_block2_3_bn[0][0]']
conv2_block2_out (Activation) (None, 64, 64, 256) 0 ['conv2_block2_add[0][0]']
conv2_block3_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block2_out[0][0]']
conv2_block3_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_1_conv[0][0]']
ization)
conv2_block3_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_1_bn[0][0]']
n)
conv2_block3_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block3_1_relu[0][0]']
conv2_block3_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_2_conv[0][0]']
ization)
conv2_block3_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_2_bn[0][0]']
n)
conv2_block3_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block3_2_relu[0][0]']
conv2_block3_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block3_3_conv[0][0]']
ization)
conv2_block3_add (Add) (None, 64, 64, 256) 0 ['conv2_block2_out[0][0]',
'conv2_block3_3_bn[0][0]']
conv2_block3_out (Activation) (None, 64, 64, 256) 0 ['conv2_block3_add[0][0]']
conv3_block1_1_conv (Conv2D) (None, 32, 32, 128) 32896 ['conv2_block3_out[0][0]']
conv3_block1_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_1_conv[0][0]']
ization)
conv3_block1_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_1_bn[0][0]']
n)
conv3_block1_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block1_1_relu[0][0]']
conv3_block1_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_2_conv[0][0]']
ization)
conv3_block1_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_2_bn[0][0]']
n)
conv3_block1_0_conv (Conv2D) (None, 32, 32, 512) 131584 ['conv2_block3_out[0][0]']
conv3_block1_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block1_2_relu[0][0]']
conv3_block1_0_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_0_conv[0][0]']
ization)
conv3_block1_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_3_conv[0][0]']
ization)
conv3_block1_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_0_bn[0][0]',
'conv3_block1_3_bn[0][0]']
conv3_block1_out (Activation) (None, 32, 32, 512) 0 ['conv3_block1_add[0][0]']
conv3_block2_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block1_out[0][0]']
conv3_block2_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_1_conv[0][0]']
ization)
conv3_block2_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_1_bn[0][0]']
n)
conv3_block2_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block2_1_relu[0][0]']
conv3_block2_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_2_conv[0][0]']
ization)
conv3_block2_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_2_bn[0][0]']
n)
conv3_block2_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block2_2_relu[0][0]']
conv3_block2_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block2_3_conv[0][0]']
ization)
conv3_block2_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_out[0][0]',
'conv3_block2_3_bn[0][0]']
conv3_block2_out (Activation) (None, 32, 32, 512) 0 ['conv3_block2_add[0][0]']
conv3_block3_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block2_out[0][0]']
conv3_block3_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_1_conv[0][0]']
ization)
conv3_block3_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_1_bn[0][0]']
n)
conv3_block3_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block3_1_relu[0][0]']
conv3_block3_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_2_conv[0][0]']
ization)
conv3_block3_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_2_bn[0][0]']
n)
conv3_block3_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block3_2_relu[0][0]']
conv3_block3_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block3_3_conv[0][0]']
ization)
conv3_block3_add (Add) (None, 32, 32, 512) 0 ['conv3_block2_out[0][0]',
'conv3_block3_3_bn[0][0]']
conv3_block3_out (Activation) (None, 32, 32, 512) 0 ['conv3_block3_add[0][0]']
conv3_block4_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block3_out[0][0]']
conv3_block4_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_1_conv[0][0]']
ization)
conv3_block4_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_1_bn[0][0]']
n)
conv3_block4_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block4_1_relu[0][0]']
conv3_block4_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_2_conv[0][0]']
ization)
conv3_block4_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_2_bn[0][0]']
n)
conv3_block4_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block4_2_relu[0][0]']
conv3_block4_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block4_3_conv[0][0]']
ization)
conv3_block4_add (Add) (None, 32, 32, 512) 0 ['conv3_block3_out[0][0]',
'conv3_block4_3_bn[0][0]']
conv3_block4_out (Activation) (None, 32, 32, 512) 0 ['conv3_block4_add[0][0]']
conv4_block1_1_conv (Conv2D) (None, 16, 16, 256) 131328 ['conv3_block4_out[0][0]']
conv4_block1_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_1_conv[0][0]']
ization)
conv4_block1_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_1_bn[0][0]']
n)
conv4_block1_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block1_1_relu[0][0]']
conv4_block1_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_2_conv[0][0]']
ization)
conv4_block1_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_2_bn[0][0]']
n)
conv4_block1_0_conv (Conv2D) (None, 16, 16, 1024 525312 ['conv3_block4_out[0][0]']
)
conv4_block1_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block1_2_relu[0][0]']
)
conv4_block1_0_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_0_conv[0][0]']
ization) )
conv4_block1_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_3_conv[0][0]']
ization) )
conv4_block1_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_0_bn[0][0]',
) 'conv4_block1_3_bn[0][0]']
conv4_block1_out (Activation) (None, 16, 16, 1024 0 ['conv4_block1_add[0][0]']
)
conv4_block2_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block1_out[0][0]']
conv4_block2_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_1_conv[0][0]']
ization)
conv4_block2_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_1_bn[0][0]']
n)
conv4_block2_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block2_1_relu[0][0]']
conv4_block2_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_2_conv[0][0]']
ization)
conv4_block2_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_2_bn[0][0]']
n)
conv4_block2_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block2_2_relu[0][0]']
)
conv4_block2_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block2_3_conv[0][0]']
ization) )
conv4_block2_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_out[0][0]',
) 'conv4_block2_3_bn[0][0]']
conv4_block2_out (Activation) (None, 16, 16, 1024 0 ['conv4_block2_add[0][0]']
)
conv4_block3_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block2_out[0][0]']
conv4_block3_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_1_conv[0][0]']
ization)
conv4_block3_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_1_bn[0][0]']
n)
conv4_block3_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block3_1_relu[0][0]']
conv4_block3_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_2_conv[0][0]']
ization)
conv4_block3_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_2_bn[0][0]']
n)
conv4_block3_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block3_2_relu[0][0]']
)
conv4_block3_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block3_3_conv[0][0]']
ization) )
conv4_block3_add (Add) (None, 16, 16, 1024 0 ['conv4_block2_out[0][0]',
) 'conv4_block3_3_bn[0][0]']
conv4_block3_out (Activation) (None, 16, 16, 1024 0 ['conv4_block3_add[0][0]']
)
conv4_block4_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block3_out[0][0]']
conv4_block4_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_1_conv[0][0]']
ization)
conv4_block4_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_1_bn[0][0]']
n)
conv4_block4_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block4_1_relu[0][0]']
conv4_block4_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_2_conv[0][0]']
ization)
conv4_block4_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_2_bn[0][0]']
n)
conv4_block4_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block4_2_relu[0][0]']
)
conv4_block4_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block4_3_conv[0][0]']
ization) )
conv4_block4_add (Add) (None, 16, 16, 1024 0 ['conv4_block3_out[0][0]',
) 'conv4_block4_3_bn[0][0]']
conv4_block4_out (Activation) (None, 16, 16, 1024 0 ['conv4_block4_add[0][0]']
)
conv4_block5_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block4_out[0][0]']
conv4_block5_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_1_conv[0][0]']
ization)
conv4_block5_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_1_bn[0][0]']
n)
conv4_block5_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block5_1_relu[0][0]']
conv4_block5_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_2_conv[0][0]']
ization)
conv4_block5_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_2_bn[0][0]']
n)
conv4_block5_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block5_2_relu[0][0]']
)
conv4_block5_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block5_3_conv[0][0]']
ization) )
conv4_block5_add (Add) (None, 16, 16, 1024 0 ['conv4_block4_out[0][0]',
) 'conv4_block5_3_bn[0][0]']
conv4_block5_out (Activation) (None, 16, 16, 1024 0 ['conv4_block5_add[0][0]']
)
conv4_block6_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block5_out[0][0]']
conv4_block6_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_1_conv[0][0]']
ization)
conv4_block6_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_1_bn[0][0]']
n)
conv4_block6_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block6_1_relu[0][0]']
conv4_block6_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_2_conv[0][0]']
ization)
conv4_block6_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_2_bn[0][0]']
n)
conv4_block6_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block6_2_relu[0][0]']
)
conv4_block6_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block6_3_conv[0][0]']
ization) )
conv4_block6_add (Add) (None, 16, 16, 1024 0 ['conv4_block5_out[0][0]',
) 'conv4_block6_3_bn[0][0]']
conv4_block6_out (Activation) (None, 16, 16, 1024 0 ['conv4_block6_add[0][0]']
)
conv5_block1_1_conv (Conv2D) (None, 8, 8, 512) 524800 ['conv4_block6_out[0][0]']
conv5_block1_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_1_conv[0][0]']
ization)
conv5_block1_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_1_bn[0][0]']
n)
conv5_block1_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block1_1_relu[0][0]']
conv5_block1_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_2_conv[0][0]']
ization)
conv5_block1_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_2_bn[0][0]']
n)
conv5_block1_0_conv (Conv2D) (None, 8, 8, 2048) 2099200 ['conv4_block6_out[0][0]']
conv5_block1_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block1_2_relu[0][0]']
conv5_block1_0_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_0_conv[0][0]']
ization)
conv5_block1_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_3_conv[0][0]']
ization)
conv5_block1_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_0_bn[0][0]',
'conv5_block1_3_bn[0][0]']
conv5_block1_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block1_add[0][0]']
conv5_block2_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block1_out[0][0]']
conv5_block2_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_1_conv[0][0]']
ization)
conv5_block2_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_1_bn[0][0]']
n)
conv5_block2_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block2_1_relu[0][0]']
conv5_block2_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_2_conv[0][0]']
ization)
conv5_block2_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_2_bn[0][0]']
n)
conv5_block2_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block2_2_relu[0][0]']
conv5_block2_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block2_3_conv[0][0]']
ization)
conv5_block2_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_out[0][0]',
'conv5_block2_3_bn[0][0]']
conv5_block2_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block2_add[0][0]']
conv5_block3_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block2_out[0][0]']
conv5_block3_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_1_conv[0][0]']
ization)
conv5_block3_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_1_bn[0][0]']
n)
conv5_block3_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block3_1_relu[0][0]']
conv5_block3_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_2_conv[0][0]']
ization)
conv5_block3_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_2_bn[0][0]']
n)
conv5_block3_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block3_2_relu[0][0]']
conv5_block3_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block3_3_conv[0][0]']
ization)
conv5_block3_add (Add) (None, 8, 8, 2048) 0 ['conv5_block2_out[0][0]',
'conv5_block3_3_bn[0][0]']
conv5_block3_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block3_add[0][0]']
==================================================================================================
Total params: 23,587,712
Trainable params: 23,534,592
Non-trainable params: 53,120
__________________________________________________________________________________________________
# before this i tried with trainable layer but the accuracy was less as compared
# Freeze the pretrained backbone so only the new head is trained.
# BUG FIX: the original wrote `layers.trainable = False`, which set an
# attribute on the imported `layers` MODULE and left every backbone layer
# trainable (the summary showed 23,534,592 trainable params). The loop
# variable `layer` must be used instead.
for layer in clf_model.layers:
    layer.trainable = False

# Classification head: pool -> flatten -> two Dense(256)+Dropout blocks ->
# 2-way softmax (tumor / no tumor).
head = clf_model.output
head = AveragePooling2D(pool_size=(4, 4))(head)
head = Flatten(name='Flatten')(head)
head = Dense(256, activation='relu')(head)
head = Dropout(0.3)(head)
head = Dense(256, activation='relu')(head)
head = Dropout(0.3)(head)
head = Dense(2, activation='softmax')(head)

model = Model(clf_model.input, head)
# Categorical cross-entropy matches the 2-unit softmax output and the
# class_mode='categorical' generators defined above.
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=["accuracy"])
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 256, 256, 3 0 []
)]
conv1_pad (ZeroPadding2D) (None, 262, 262, 3) 0 ['input_1[0][0]']
conv1_conv (Conv2D) (None, 128, 128, 64 9472 ['conv1_pad[0][0]']
)
conv1_bn (BatchNormalization) (None, 128, 128, 64 256 ['conv1_conv[0][0]']
)
conv1_relu (Activation) (None, 128, 128, 64 0 ['conv1_bn[0][0]']
)
pool1_pad (ZeroPadding2D) (None, 130, 130, 64 0 ['conv1_relu[0][0]']
)
pool1_pool (MaxPooling2D) (None, 64, 64, 64) 0 ['pool1_pad[0][0]']
conv2_block1_1_conv (Conv2D) (None, 64, 64, 64) 4160 ['pool1_pool[0][0]']
conv2_block1_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_1_conv[0][0]']
ization)
conv2_block1_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_1_bn[0][0]']
n)
conv2_block1_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block1_1_relu[0][0]']
conv2_block1_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_2_conv[0][0]']
ization)
conv2_block1_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_2_bn[0][0]']
n)
conv2_block1_0_conv (Conv2D) (None, 64, 64, 256) 16640 ['pool1_pool[0][0]']
conv2_block1_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block1_2_relu[0][0]']
conv2_block1_0_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_0_conv[0][0]']
ization)
conv2_block1_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_3_conv[0][0]']
ization)
conv2_block1_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_0_bn[0][0]',
'conv2_block1_3_bn[0][0]']
conv2_block1_out (Activation) (None, 64, 64, 256) 0 ['conv2_block1_add[0][0]']
conv2_block2_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block1_out[0][0]']
conv2_block2_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_1_conv[0][0]']
ization)
conv2_block2_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_1_bn[0][0]']
n)
conv2_block2_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block2_1_relu[0][0]']
conv2_block2_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_2_conv[0][0]']
ization)
conv2_block2_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_2_bn[0][0]']
n)
conv2_block2_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block2_2_relu[0][0]']
conv2_block2_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block2_3_conv[0][0]']
ization)
conv2_block2_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_out[0][0]',
'conv2_block2_3_bn[0][0]']
conv2_block2_out (Activation) (None, 64, 64, 256) 0 ['conv2_block2_add[0][0]']
conv2_block3_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block2_out[0][0]']
conv2_block3_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_1_conv[0][0]']
ization)
conv2_block3_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_1_bn[0][0]']
n)
conv2_block3_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block3_1_relu[0][0]']
conv2_block3_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_2_conv[0][0]']
ization)
conv2_block3_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_2_bn[0][0]']
n)
conv2_block3_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block3_2_relu[0][0]']
conv2_block3_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block3_3_conv[0][0]']
ization)
conv2_block3_add (Add) (None, 64, 64, 256) 0 ['conv2_block2_out[0][0]',
'conv2_block3_3_bn[0][0]']
conv2_block3_out (Activation) (None, 64, 64, 256) 0 ['conv2_block3_add[0][0]']
conv3_block1_1_conv (Conv2D) (None, 32, 32, 128) 32896 ['conv2_block3_out[0][0]']
conv3_block1_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_1_conv[0][0]']
ization)
conv3_block1_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_1_bn[0][0]']
n)
conv3_block1_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block1_1_relu[0][0]']
conv3_block1_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_2_conv[0][0]']
ization)
conv3_block1_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_2_bn[0][0]']
n)
conv3_block1_0_conv (Conv2D) (None, 32, 32, 512) 131584 ['conv2_block3_out[0][0]']
conv3_block1_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block1_2_relu[0][0]']
conv3_block1_0_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_0_conv[0][0]']
ization)
conv3_block1_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_3_conv[0][0]']
ization)
conv3_block1_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_0_bn[0][0]',
'conv3_block1_3_bn[0][0]']
conv3_block1_out (Activation) (None, 32, 32, 512) 0 ['conv3_block1_add[0][0]']
conv3_block2_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block1_out[0][0]']
conv3_block2_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_1_conv[0][0]']
ization)
conv3_block2_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_1_bn[0][0]']
n)
conv3_block2_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block2_1_relu[0][0]']
conv3_block2_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_2_conv[0][0]']
ization)
conv3_block2_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_2_bn[0][0]']
n)
conv3_block2_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block2_2_relu[0][0]']
conv3_block2_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block2_3_conv[0][0]']
ization)
conv3_block2_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_out[0][0]',
'conv3_block2_3_bn[0][0]']
conv3_block2_out (Activation) (None, 32, 32, 512) 0 ['conv3_block2_add[0][0]']
conv3_block3_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block2_out[0][0]']
conv3_block3_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_1_conv[0][0]']
ization)
conv3_block3_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_1_bn[0][0]']
n)
conv3_block3_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block3_1_relu[0][0]']
conv3_block3_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_2_conv[0][0]']
ization)
conv3_block3_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_2_bn[0][0]']
n)
conv3_block3_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block3_2_relu[0][0]']
conv3_block3_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block3_3_conv[0][0]']
ization)
conv3_block3_add (Add) (None, 32, 32, 512) 0 ['conv3_block2_out[0][0]',
'conv3_block3_3_bn[0][0]']
conv3_block3_out (Activation) (None, 32, 32, 512) 0 ['conv3_block3_add[0][0]']
conv3_block4_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block3_out[0][0]']
conv3_block4_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_1_conv[0][0]']
ization)
conv3_block4_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_1_bn[0][0]']
n)
conv3_block4_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block4_1_relu[0][0]']
conv3_block4_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_2_conv[0][0]']
ization)
conv3_block4_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_2_bn[0][0]']
n)
conv3_block4_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block4_2_relu[0][0]']
conv3_block4_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block4_3_conv[0][0]']
ization)
conv3_block4_add (Add) (None, 32, 32, 512) 0 ['conv3_block3_out[0][0]',
'conv3_block4_3_bn[0][0]']
conv3_block4_out (Activation) (None, 32, 32, 512) 0 ['conv3_block4_add[0][0]']
conv4_block1_1_conv (Conv2D) (None, 16, 16, 256) 131328 ['conv3_block4_out[0][0]']
conv4_block1_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_1_conv[0][0]']
ization)
conv4_block1_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_1_bn[0][0]']
n)
conv4_block1_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block1_1_relu[0][0]']
conv4_block1_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_2_conv[0][0]']
ization)
conv4_block1_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_2_bn[0][0]']
n)
conv4_block1_0_conv (Conv2D) (None, 16, 16, 1024 525312 ['conv3_block4_out[0][0]']
)
conv4_block1_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block1_2_relu[0][0]']
)
conv4_block1_0_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_0_conv[0][0]']
ization) )
conv4_block1_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_3_conv[0][0]']
ization) )
conv4_block1_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_0_bn[0][0]',
) 'conv4_block1_3_bn[0][0]']
conv4_block1_out (Activation) (None, 16, 16, 1024 0 ['conv4_block1_add[0][0]']
)
conv4_block2_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block1_out[0][0]']
conv4_block2_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_1_conv[0][0]']
ization)
conv4_block2_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_1_bn[0][0]']
n)
conv4_block2_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block2_1_relu[0][0]']
conv4_block2_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_2_conv[0][0]']
ization)
conv4_block2_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_2_bn[0][0]']
n)
conv4_block2_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block2_2_relu[0][0]']
)
conv4_block2_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block2_3_conv[0][0]']
ization) )
conv4_block2_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_out[0][0]',
) 'conv4_block2_3_bn[0][0]']
conv4_block2_out (Activation) (None, 16, 16, 1024 0 ['conv4_block2_add[0][0]']
)
conv4_block3_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block2_out[0][0]']
conv4_block3_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_1_conv[0][0]']
ization)
conv4_block3_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_1_bn[0][0]']
n)
conv4_block3_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block3_1_relu[0][0]']
conv4_block3_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_2_conv[0][0]']
ization)
conv4_block3_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_2_bn[0][0]']
n)
conv4_block3_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block3_2_relu[0][0]']
)
conv4_block3_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block3_3_conv[0][0]']
ization) )
conv4_block3_add (Add) (None, 16, 16, 1024 0 ['conv4_block2_out[0][0]',
) 'conv4_block3_3_bn[0][0]']
conv4_block3_out (Activation) (None, 16, 16, 1024 0 ['conv4_block3_add[0][0]']
)
conv4_block4_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block3_out[0][0]']
conv4_block4_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_1_conv[0][0]']
ization)
conv4_block4_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_1_bn[0][0]']
n)
conv4_block4_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block4_1_relu[0][0]']
conv4_block4_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_2_conv[0][0]']
ization)
conv4_block4_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_2_bn[0][0]']
n)
conv4_block4_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block4_2_relu[0][0]']
)
conv4_block4_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block4_3_conv[0][0]']
ization) )
conv4_block4_add (Add) (None, 16, 16, 1024 0 ['conv4_block3_out[0][0]',
) 'conv4_block4_3_bn[0][0]']
conv4_block4_out (Activation) (None, 16, 16, 1024 0 ['conv4_block4_add[0][0]']
)
conv4_block5_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block4_out[0][0]']
conv4_block5_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_1_conv[0][0]']
ization)
conv4_block5_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_1_bn[0][0]']
n)
conv4_block5_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block5_1_relu[0][0]']
conv4_block5_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_2_conv[0][0]']
ization)
conv4_block5_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_2_bn[0][0]']
n)
conv4_block5_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block5_2_relu[0][0]']
)
conv4_block5_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block5_3_conv[0][0]']
ization) )
conv4_block5_add (Add) (None, 16, 16, 1024 0 ['conv4_block4_out[0][0]',
) 'conv4_block5_3_bn[0][0]']
conv4_block5_out (Activation) (None, 16, 16, 1024 0 ['conv4_block5_add[0][0]']
)
conv4_block6_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block5_out[0][0]']
conv4_block6_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_1_conv[0][0]']
ization)
conv4_block6_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_1_bn[0][0]']
n)
conv4_block6_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block6_1_relu[0][0]']
conv4_block6_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_2_conv[0][0]']
ization)
conv4_block6_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_2_bn[0][0]']
n)
conv4_block6_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block6_2_relu[0][0]']
)
conv4_block6_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block6_3_conv[0][0]']
ization) )
conv4_block6_add (Add) (None, 16, 16, 1024 0 ['conv4_block5_out[0][0]',
) 'conv4_block6_3_bn[0][0]']
conv4_block6_out (Activation) (None, 16, 16, 1024 0 ['conv4_block6_add[0][0]']
)
conv5_block1_1_conv (Conv2D) (None, 8, 8, 512) 524800 ['conv4_block6_out[0][0]']
conv5_block1_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_1_conv[0][0]']
ization)
conv5_block1_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_1_bn[0][0]']
n)
conv5_block1_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block1_1_relu[0][0]']
conv5_block1_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_2_conv[0][0]']
ization)
conv5_block1_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_2_bn[0][0]']
n)
conv5_block1_0_conv (Conv2D) (None, 8, 8, 2048) 2099200 ['conv4_block6_out[0][0]']
conv5_block1_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block1_2_relu[0][0]']
conv5_block1_0_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_0_conv[0][0]']
ization)
conv5_block1_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_3_conv[0][0]']
ization)
conv5_block1_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_0_bn[0][0]',
'conv5_block1_3_bn[0][0]']
conv5_block1_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block1_add[0][0]']
conv5_block2_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block1_out[0][0]']
conv5_block2_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_1_conv[0][0]']
ization)
conv5_block2_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_1_bn[0][0]']
n)
conv5_block2_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block2_1_relu[0][0]']
conv5_block2_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_2_conv[0][0]']
ization)
conv5_block2_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_2_bn[0][0]']
n)
conv5_block2_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block2_2_relu[0][0]']
conv5_block2_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block2_3_conv[0][0]']
ization)
conv5_block2_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_out[0][0]',
'conv5_block2_3_bn[0][0]']
conv5_block2_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block2_add[0][0]']
conv5_block3_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block2_out[0][0]']
conv5_block3_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_1_conv[0][0]']
ization)
conv5_block3_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_1_bn[0][0]']
n)
conv5_block3_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block3_1_relu[0][0]']
conv5_block3_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_2_conv[0][0]']
ization)
conv5_block3_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_2_bn[0][0]']
n)
conv5_block3_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block3_2_relu[0][0]']
conv5_block3_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block3_3_conv[0][0]']
ization)
conv5_block3_add (Add) (None, 8, 8, 2048) 0 ['conv5_block2_out[0][0]',
'conv5_block3_3_bn[0][0]']
conv5_block3_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block3_add[0][0]']
average_pooling2d (AveragePool (None, 2, 2, 2048) 0 ['conv5_block3_out[0][0]']
ing2D)
Flatten (Flatten) (None, 8192) 0 ['average_pooling2d[0][0]']
dense (Dense) (None, 256) 2097408 ['Flatten[0][0]']
dropout (Dropout) (None, 256) 0 ['dense[0][0]']
dense_1 (Dense) (None, 256) 65792 ['dropout[0][0]']
dropout_1 (Dropout) (None, 256) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 2) 514 ['dropout_1[0][0]']
==================================================================================================
Total params: 25,751,426
Trainable params: 25,698,306
Non-trainable params: 53,120
__________________________________________________________________________________________________
# Callbacks for the classifier training run.
# Halt training once validation loss has stalled for 15 epochs.
earlystopping = EarlyStopping(monitor='val_loss', mode='min',
                              verbose=1, patience=15)
# Keep only the weights that achieve a new best validation loss.
checkpointer = ModelCheckpoint(filepath="clf-resnet-weights.hdf5",
                               verbose=1, save_best_only=True)
# Shrink the learning rate 5x after 10 epochs without >=1e-4 improvement.
reduce_lr = ReduceLROnPlateau(monitor='val_loss', mode='min', verbose=1,
                              patience=10, min_delta=0.0001, factor=0.2)
callbacks = [checkpointer, earlystopping, reduce_lr]
# Train the classifier.
# FIX: the `callbacks` list assembled above includes reduce_lr, but the
# original call passed only [checkpointer, earlystopping], silently dropping
# the LR schedule — pass the full list instead.
h = model.fit(train_generator,
              steps_per_epoch=train_generator.n // train_generator.batch_size,
              epochs=50,
              validation_data=valid_generator,
              validation_steps=valid_generator.n // valid_generator.batch_size,
              callbacks=callbacks)
# Persist the classifier architecture as JSON (the weights are written
# separately by the ModelCheckpoint callback above).
with open("clf-resnet-model.json", "w") as json_file:
    json_file.write(model.to_json())
h.history.keys()
# Side-by-side training curves: loss on the left, accuracy on the right.
plt.figure(figsize=(12, 5))
curves = [('loss', "Classification Model LOSS", "loss"),
          ('accuracy', "Classification Model Acc", "Accuracy")]
for panel, (metric, title, ylabel) in enumerate(curves, start=1):
    plt.subplot(1, 2, panel)
    plt.plot(h.history[metric])
    plt.plot(h.history['val_' + metric])
    plt.title(title)
    plt.ylabel(ylabel)
    plt.xlabel("Epochs")
    plt.legend(['train', 'val'])
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report

# Hold-out evaluation of the classifier on the test generator.
_, acc = model.evaluate(test_generator)
print("Test accuracy : {} %".format(acc * 100))

# Predicted class per image vs. the ground-truth mask flag.
prediction = model.predict(test_generator)
pred = np.argmax(prediction, axis=1)
original = np.asarray(test['mask']).astype('int')

accuracy = accuracy_score(original, pred)
print(accuracy)
cm = confusion_matrix(original, pred)
report = classification_report(original, pred, labels=[0, 1])
print(report)

# Confusion matrix heatmap.
plt.figure(figsize=(5, 5))
sns.heatmap(cm, annot=True)
# Keep only rows whose slice actually contains tumour (mask flag == 1);
# the segmentation network below is trained on these rows alone.
brain_df_mask = brain_df[brain_df['mask'] == 1]
brain_df_mask.shape
(1373, 4)
# Split the tumour-only rows: 85% train, then halve the remaining 15%
# into validation and test sets.
X_train, X_val = train_test_split(brain_df_mask, test_size=0.15)
X_test, X_val = train_test_split(X_val, test_size=0.5)
print("Train size is {}, valid size is {} & test size is {}".format(len(X_train), len(X_val), len(X_test)))

# Flat path lists consumed by DataGenerator below.
train_ids, train_mask = list(X_train.image_path), list(X_train.mask_path)
val_ids, val_mask = list(X_val.image_path), list(X_val.mask_path)
Train size is 1167, valid size is 103 & test size is 103
class DataGenerator(tf.keras.utils.Sequence):
    """Keras Sequence yielding (image, mask) batches for segmentation.

    Images are read from disk, resized to (img_h, img_w), standardised to
    zero mean / unit variance; masks are binarised to {0, 1}.

    FIXES vs. the original:
    - __len__ had a misplaced parenthesis: ``int(np.floor(len(ids)) / bs)``
      floored the (already integral) length instead of the quotient. Same
      value by coincidence, now written as intended.
    - Division by ``std()`` is guarded: a constant image or all-black mask
      has std == 0 and previously produced NaNs / runtime warnings.
    """

    def __init__(self, ids, mask, image_dir='./', batch_size=16,
                 img_h=256, img_w=256, shuffle=True):
        # ids / mask: parallel lists of image and mask file paths.
        self.ids = ids
        self.mask = mask
        self.image_dir = image_dir
        self.batch_size = batch_size
        self.img_h = img_h
        self.img_w = img_w
        self.shuffle = shuffle
        self.on_epoch_end()  # build (and optionally shuffle) the index array

    def __len__(self):
        'Number of full batches per epoch (partial final batch is dropped).'
        return int(np.floor(len(self.ids) / self.batch_size))

    def __getitem__(self, index):
        'Generate one batch of data.'
        # indices of the samples that make up this batch
        indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size]
        # corresponding image and mask paths
        list_ids = [self.ids[i] for i in indexes]
        list_mask = [self.mask[i] for i in indexes]
        return self.__data_generation(list_ids, list_mask)

    def on_epoch_end(self):
        'Rebuild (and shuffle, if enabled) the sample indices each epoch.'
        self.indexes = np.arange(len(self.ids))
        if self.shuffle:
            np.random.shuffle(self.indexes)

    def __data_generation(self, list_ids, list_mask):
        'Load, resize and standardise the images/masks for one batch.'
        # depth 3 for RGB input; depth 1 because the mask is single-channel
        X = np.empty((self.batch_size, self.img_h, self.img_w, 3))
        y = np.empty((self.batch_size, self.img_h, self.img_w, 1))
        for i in range(len(list_ids)):
            img_path = str(list_ids[i])
            mask_path = str(list_mask[i])
            # read the original image and its corresponding mask
            img = io.imread(img_path)
            mask = io.imread(mask_path)
            # resize and convert to float64 arrays
            img = np.array(cv2.resize(img, (self.img_h, self.img_w)), dtype=np.float64)
            mask = np.array(cv2.resize(mask, (self.img_h, self.img_w)), dtype=np.float64)
            # standardise; guard against zero variance (constant image/mask)
            img -= img.mean()
            img_std = img.std()
            if img_std > 0:
                img /= img_std
            mask -= mask.mean()
            mask_std = mask.std()
            if mask_std > 0:
                mask /= mask_std
            X[i, ] = img
            # expand mask from (h, w) to (h, w, 1)
            y[i, ] = np.expand_dims(mask, axis=2)
        # binarise the mask: any above-mean pixel becomes 1
        y = (y > 0).astype(int)
        return X, y
# Generators over the tumour-only splits (defaults: 16-image batches at 256x256).
train_data = DataGenerator(train_ids, train_mask)
val_data = DataGenerator(val_ids, val_mask)
# lets create model now
def resblock(X, f):
    """Residual block with `f` filters.

    Main path: 1x1 conv -> BN -> ReLU -> 3x3 conv -> BN.
    Shortcut:  1x1 conv -> BN projection of the input.
    The two paths are summed and passed through a final ReLU.
    """
    residual = X  # input kept for the projection shortcut
    # main path
    out = Conv2D(f, kernel_size=(1, 1), kernel_initializer='he_normal')(X)
    out = BatchNormalization()(out)
    out = Activation('relu')(out)
    out = Conv2D(f, kernel_size=(3, 3), padding='same', kernel_initializer='he_normal')(out)
    out = BatchNormalization()(out)
    # projection shortcut
    residual = Conv2D(f, kernel_size=(1, 1), kernel_initializer='he_normal')(residual)
    residual = BatchNormalization()(residual)
    # merge the two paths and activate
    return Activation('relu')(Add()([out, residual]))
def upsample_concat(x, skip):
    """2x upsample `x` and concatenate it with the `skip` feature map
    along the channel axis (U-Net style skip connection)."""
    upsampled = UpSampling2D((2, 2))(x)
    return Concatenate()([upsampled, skip])
# ---- ResUNet segmentation network: encoder / bottleneck / decoder ----
input_shape = (256, 256, 3)
X_input = Input(input_shape)  # 256x256 RGB slice

# Encoder stage 1: plain double 3x3 conv (no residual) at full resolution.
s1 = Conv2D(16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(X_input)
s1 = BatchNormalization()(s1)
s1 = Conv2D(16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(s1)
s1 = BatchNormalization()(s1)
p1 = MaxPool2D((2, 2))(s1)

# Encoder stages 2-4: residual block, then 2x downsample.
s2 = resblock(p1, 32)
p2 = MaxPool2D((2, 2))(s2)
s3 = resblock(p2, 64)
p3 = MaxPool2D((2, 2))(s3)
s4 = resblock(p3, 128)
p4 = MaxPool2D((2, 2))(s4)

# Bottleneck.
bottleneck = resblock(p4, 256)

# Decoder: upsample, concatenate the matching encoder map, refine.
d1 = resblock(upsample_concat(bottleneck, s4), 128)
d2 = resblock(upsample_concat(d1, s3), 64)
d3 = resblock(upsample_concat(d2, s2), 32)
d4 = resblock(upsample_concat(d3, s1), 16)

# Per-pixel sigmoid -> soft binary tumour mask.
out = Conv2D(1, (1, 1), kernel_initializer='he_normal', padding='same', activation='sigmoid')(d4)
seg_model = Model(X_input, out)
seg_model.summary()
Model: "model_1"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_2 (InputLayer) [(None, 256, 256, 3 0 []
)]
conv2d (Conv2D) (None, 256, 256, 16 448 ['input_2[0][0]']
)
batch_normalization (BatchNorm (None, 256, 256, 16 64 ['conv2d[0][0]']
alization) )
conv2d_1 (Conv2D) (None, 256, 256, 16 2320 ['batch_normalization[0][0]']
)
batch_normalization_1 (BatchNo (None, 256, 256, 16 64 ['conv2d_1[0][0]']
rmalization) )
max_pooling2d (MaxPooling2D) (None, 128, 128, 16 0 ['batch_normalization_1[0][0]']
)
conv2d_2 (Conv2D) (None, 128, 128, 32 544 ['max_pooling2d[0][0]']
)
batch_normalization_2 (BatchNo (None, 128, 128, 32 128 ['conv2d_2[0][0]']
rmalization) )
activation (Activation) (None, 128, 128, 32 0 ['batch_normalization_2[0][0]']
)
conv2d_3 (Conv2D) (None, 128, 128, 32 9248 ['activation[0][0]']
)
conv2d_4 (Conv2D) (None, 128, 128, 32 544 ['max_pooling2d[0][0]']
)
batch_normalization_3 (BatchNo (None, 128, 128, 32 128 ['conv2d_3[0][0]']
rmalization) )
batch_normalization_4 (BatchNo (None, 128, 128, 32 128 ['conv2d_4[0][0]']
rmalization) )
add (Add) (None, 128, 128, 32 0 ['batch_normalization_3[0][0]',
) 'batch_normalization_4[0][0]']
activation_1 (Activation) (None, 128, 128, 32 0 ['add[0][0]']
)
max_pooling2d_1 (MaxPooling2D) (None, 64, 64, 32) 0 ['activation_1[0][0]']
conv2d_5 (Conv2D) (None, 64, 64, 64) 2112 ['max_pooling2d_1[0][0]']
batch_normalization_5 (BatchNo (None, 64, 64, 64) 256 ['conv2d_5[0][0]']
rmalization)
activation_2 (Activation) (None, 64, 64, 64) 0 ['batch_normalization_5[0][0]']
conv2d_6 (Conv2D) (None, 64, 64, 64) 36928 ['activation_2[0][0]']
conv2d_7 (Conv2D) (None, 64, 64, 64) 2112 ['max_pooling2d_1[0][0]']
batch_normalization_6 (BatchNo (None, 64, 64, 64) 256 ['conv2d_6[0][0]']
rmalization)
batch_normalization_7 (BatchNo (None, 64, 64, 64) 256 ['conv2d_7[0][0]']
rmalization)
add_1 (Add) (None, 64, 64, 64) 0 ['batch_normalization_6[0][0]',
'batch_normalization_7[0][0]']
activation_3 (Activation) (None, 64, 64, 64) 0 ['add_1[0][0]']
max_pooling2d_2 (MaxPooling2D) (None, 32, 32, 64) 0 ['activation_3[0][0]']
conv2d_8 (Conv2D) (None, 32, 32, 128) 8320 ['max_pooling2d_2[0][0]']
batch_normalization_8 (BatchNo (None, 32, 32, 128) 512 ['conv2d_8[0][0]']
rmalization)
activation_4 (Activation) (None, 32, 32, 128) 0 ['batch_normalization_8[0][0]']
conv2d_9 (Conv2D) (None, 32, 32, 128) 147584 ['activation_4[0][0]']
conv2d_10 (Conv2D) (None, 32, 32, 128) 8320 ['max_pooling2d_2[0][0]']
batch_normalization_9 (BatchNo (None, 32, 32, 128) 512 ['conv2d_9[0][0]']
rmalization)
batch_normalization_10 (BatchN (None, 32, 32, 128) 512 ['conv2d_10[0][0]']
ormalization)
add_2 (Add) (None, 32, 32, 128) 0 ['batch_normalization_9[0][0]',
'batch_normalization_10[0][0]']
activation_5 (Activation) (None, 32, 32, 128) 0 ['add_2[0][0]']
max_pooling2d_3 (MaxPooling2D) (None, 16, 16, 128) 0 ['activation_5[0][0]']
conv2d_11 (Conv2D) (None, 16, 16, 256) 33024 ['max_pooling2d_3[0][0]']
batch_normalization_11 (BatchN (None, 16, 16, 256) 1024 ['conv2d_11[0][0]']
ormalization)
activation_6 (Activation) (None, 16, 16, 256) 0 ['batch_normalization_11[0][0]']
conv2d_12 (Conv2D) (None, 16, 16, 256) 590080 ['activation_6[0][0]']
conv2d_13 (Conv2D) (None, 16, 16, 256) 33024 ['max_pooling2d_3[0][0]']
batch_normalization_12 (BatchN (None, 16, 16, 256) 1024 ['conv2d_12[0][0]']
ormalization)
batch_normalization_13 (BatchN (None, 16, 16, 256) 1024 ['conv2d_13[0][0]']
ormalization)
add_3 (Add) (None, 16, 16, 256) 0 ['batch_normalization_12[0][0]',
'batch_normalization_13[0][0]']
activation_7 (Activation) (None, 16, 16, 256) 0 ['add_3[0][0]']
up_sampling2d (UpSampling2D) (None, 32, 32, 256) 0 ['activation_7[0][0]']
concatenate (Concatenate) (None, 32, 32, 384) 0 ['up_sampling2d[0][0]',
'activation_5[0][0]']
conv2d_14 (Conv2D) (None, 32, 32, 128) 49280 ['concatenate[0][0]']
batch_normalization_14 (BatchN (None, 32, 32, 128) 512 ['conv2d_14[0][0]']
ormalization)
activation_8 (Activation) (None, 32, 32, 128) 0 ['batch_normalization_14[0][0]']
conv2d_15 (Conv2D) (None, 32, 32, 128) 147584 ['activation_8[0][0]']
conv2d_16 (Conv2D) (None, 32, 32, 128) 49280 ['concatenate[0][0]']
batch_normalization_15 (BatchN (None, 32, 32, 128) 512 ['conv2d_15[0][0]']
ormalization)
batch_normalization_16 (BatchN (None, 32, 32, 128) 512 ['conv2d_16[0][0]']
ormalization)
add_4 (Add) (None, 32, 32, 128) 0 ['batch_normalization_15[0][0]',
'batch_normalization_16[0][0]']
activation_9 (Activation) (None, 32, 32, 128) 0 ['add_4[0][0]']
up_sampling2d_1 (UpSampling2D) (None, 64, 64, 128) 0 ['activation_9[0][0]']
concatenate_1 (Concatenate) (None, 64, 64, 192) 0 ['up_sampling2d_1[0][0]',
'activation_3[0][0]']
conv2d_17 (Conv2D) (None, 64, 64, 64) 12352 ['concatenate_1[0][0]']
batch_normalization_17 (BatchN (None, 64, 64, 64) 256 ['conv2d_17[0][0]']
ormalization)
activation_10 (Activation) (None, 64, 64, 64) 0 ['batch_normalization_17[0][0]']
conv2d_18 (Conv2D) (None, 64, 64, 64) 36928 ['activation_10[0][0]']
conv2d_19 (Conv2D) (None, 64, 64, 64) 12352 ['concatenate_1[0][0]']
batch_normalization_18 (BatchN (None, 64, 64, 64) 256 ['conv2d_18[0][0]']
ormalization)
batch_normalization_19 (BatchN (None, 64, 64, 64) 256 ['conv2d_19[0][0]']
ormalization)
add_5 (Add) (None, 64, 64, 64) 0 ['batch_normalization_18[0][0]',
'batch_normalization_19[0][0]']
activation_11 (Activation) (None, 64, 64, 64) 0 ['add_5[0][0]']
up_sampling2d_2 (UpSampling2D) (None, 128, 128, 64 0 ['activation_11[0][0]']
)
concatenate_2 (Concatenate) (None, 128, 128, 96 0 ['up_sampling2d_2[0][0]',
) 'activation_1[0][0]']
conv2d_20 (Conv2D) (None, 128, 128, 32 3104 ['concatenate_2[0][0]']
)
batch_normalization_20 (BatchN (None, 128, 128, 32 128 ['conv2d_20[0][0]']
ormalization) )
activation_12 (Activation) (None, 128, 128, 32 0 ['batch_normalization_20[0][0]']
)
conv2d_21 (Conv2D) (None, 128, 128, 32 9248 ['activation_12[0][0]']
)
conv2d_22 (Conv2D) (None, 128, 128, 32 3104 ['concatenate_2[0][0]']
)
batch_normalization_21 (BatchN (None, 128, 128, 32 128 ['conv2d_21[0][0]']
ormalization) )
batch_normalization_22 (BatchN (None, 128, 128, 32 128 ['conv2d_22[0][0]']
ormalization) )
add_6 (Add) (None, 128, 128, 32 0 ['batch_normalization_21[0][0]',
) 'batch_normalization_22[0][0]']
activation_13 (Activation) (None, 128, 128, 32 0 ['add_6[0][0]']
)
up_sampling2d_3 (UpSampling2D) (None, 256, 256, 32 0 ['activation_13[0][0]']
)
concatenate_3 (Concatenate) (None, 256, 256, 48 0 ['up_sampling2d_3[0][0]',
) 'batch_normalization_1[0][0]']
conv2d_23 (Conv2D) (None, 256, 256, 16 784 ['concatenate_3[0][0]']
)
batch_normalization_23 (BatchN (None, 256, 256, 16 64 ['conv2d_23[0][0]']
ormalization) )
activation_14 (Activation) (None, 256, 256, 16 0 ['batch_normalization_23[0][0]']
)
conv2d_24 (Conv2D) (None, 256, 256, 16 2320 ['activation_14[0][0]']
)
conv2d_25 (Conv2D) (None, 256, 256, 16 784 ['concatenate_3[0][0]']
)
batch_normalization_24 (BatchN (None, 256, 256, 16 64 ['conv2d_24[0][0]']
ormalization) )
batch_normalization_25 (BatchN (None, 256, 256, 16 64 ['conv2d_25[0][0]']
ormalization) )
add_7 (Add) (None, 256, 256, 16 0 ['batch_normalization_24[0][0]',
) 'batch_normalization_25[0][0]']
activation_15 (Activation) (None, 256, 256, 16 0 ['add_7[0][0]']
)
conv2d_26 (Conv2D) (None, 256, 256, 1) 17 ['activation_15[0][0]']
==================================================================================================
Total params: 1,210,513
Trainable params: 1,206,129
Non-trainable params: 4,384
__________________________________________________________________________________________________
from keras.losses import binary_crossentropy
# Numerical-stability constants for the Tversky-based losses below.
epsilon = 1e-5  # NOTE(review): not referenced by tversky() below — smooth is used instead
smooth = 1
def tversky(y_true, y_pred):
    """Tversky index with alpha = 0.7.

    Weighs false negatives more heavily than false positives, which favours
    recall on small tumour regions. `smooth` (module-level) avoids 0/0.
    """
    alpha = 0.7
    truth = K.flatten(y_true)
    pred = K.flatten(y_pred)
    true_pos = K.sum(truth * pred)
    false_neg = K.sum(truth * (1 - pred))
    false_pos = K.sum((1 - truth) * pred)
    denom = true_pos + alpha * false_neg + (1 - alpha) * false_pos + smooth
    return (true_pos + smooth) / denom
def focal_tversky(y_true, y_pred):
    """Focal Tversky loss: (1 - Tversky)^gamma with gamma = 0.75.

    The exponent down-weights easy examples, focusing gradient on hard ones.
    Inputs are cast to float32 before the index is computed.
    """
    gamma = 0.75
    yt = tf.cast(y_true, tf.float32)
    yp = tf.cast(y_pred, tf.float32)
    return K.pow(1 - tversky(yt, yp), gamma)
def tversky_loss(y_true, y_pred):
    """Plain (non-focal) Tversky loss: 1 - Tversky index."""
    return 1 - tversky(y_true,y_pred)
# Compile the segmentation model with the focal Tversky loss; the raw
# Tversky index is tracked as the validation metric.
# FIX: `lr` is deprecated in tf.keras optimizers (the original run emitted
# "The `lr` argument is deprecated, use `learning_rate` instead") — use the
# supported `learning_rate` kwarg.
adam = tf.keras.optimizers.Adam(learning_rate=0.05, epsilon=0.1)
seg_model.compile(optimizer=adam,
                  loss=focal_tversky,
                  metrics=[tversky])
# Callbacks for the segmentation run (rebinding the names used for the
# classifier earlier in the notebook).
# Stop after 20 epochs without validation-loss improvement.
earlystopping = EarlyStopping(monitor='val_loss', mode='min',
                              verbose=1, patience=20)
# Save only the weights achieving a new best validation loss.
checkpointer = ModelCheckpoint(filepath="ResUNet-segModel-weights.hdf5",
                               verbose=1, save_best_only=True)
# Multiply the LR by 0.2 after 10 epochs without >=1e-4 improvement.
reduce_lr = ReduceLROnPlateau(monitor='val_loss', mode='min', verbose=1,
                              patience=10, min_delta=0.0001, factor=0.2)
C:\Users\will.pratt\Documents\temp\lib\site-packages\keras\optimizer_v2\adam.py:105: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. super(Adam, self).__init__(name, **kwargs)
# Train the segmentation network; the best weights are written to the
# checkpoint file by the ModelCheckpoint callback.
seg_callbacks = [checkpointer, earlystopping, reduce_lr]
h = seg_model.fit(train_data,
                  epochs=60,
                  validation_data=val_data,
                  callbacks=seg_callbacks)
Epoch 1/60 72/72 [==============================] - ETA: 0s - loss: 0.8465 - tversky: 0.1981 Epoch 00001: val_loss improved from inf to 0.80569, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 213s 3s/step - loss: 0.8465 - tversky: 0.1981 - val_loss: 0.8057 - val_tversky: 0.2502 - lr: 0.0500
C:\Users\will.pratt\Documents\temp\lib\site-packages\keras\engine\functional.py:1410: CustomMaskWarning: Custom mask layers require a config and must override get_config. When loading, the custom mask layer must be passed to the custom_objects argument. layer_config = serialize_layer_fn(layer)
Epoch 2/60 72/72 [==============================] - ETA: 0s - loss: 0.5110 - tversky: 0.5868 Epoch 00002: val_loss improved from 0.80569 to 0.67348, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 203s 3s/step - loss: 0.5110 - tversky: 0.5868 - val_loss: 0.6735 - val_tversky: 0.4096 - lr: 0.0500 Epoch 3/60 72/72 [==============================] - ETA: 0s - loss: 0.3790 - tversky: 0.7232 Epoch 00003: val_loss improved from 0.67348 to 0.56456, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 212s 3s/step - loss: 0.3790 - tversky: 0.7232 - val_loss: 0.5646 - val_tversky: 0.5327 - lr: 0.0500 Epoch 4/60 72/72 [==============================] - ETA: 0s - loss: 0.3321 - tversky: 0.7681 Epoch 00004: val_loss improved from 0.56456 to 0.48963, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 217s 3s/step - loss: 0.3321 - tversky: 0.7681 - val_loss: 0.4896 - val_tversky: 0.6131 - lr: 0.0500 Epoch 5/60 72/72 [==============================] - ETA: 0s - loss: 0.2940 - tversky: 0.8026 Epoch 00005: val_loss improved from 0.48963 to 0.30965, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 207s 3s/step - loss: 0.2940 - tversky: 0.8026 - val_loss: 0.3096 - val_tversky: 0.7899 - lr: 0.0500 Epoch 6/60 72/72 [==============================] - ETA: 0s - loss: 0.2744 - tversky: 0.8198 Epoch 00006: val_loss did not improve from 0.30965 72/72 [==============================] - 205s 3s/step - loss: 0.2744 - tversky: 0.8198 - val_loss: 0.3544 - val_tversky: 0.7489 - lr: 0.0500 Epoch 7/60 72/72 [==============================] - ETA: 0s - loss: 0.2622 - tversky: 0.8308 Epoch 00007: val_loss improved from 0.30965 to 0.26843, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 206s 3s/step - loss: 0.2622 - tversky: 0.8308 - val_loss: 0.2684 - val_tversky: 0.8241 - lr: 0.0500 Epoch 8/60 72/72 
[==============================] - ETA: 0s - loss: 0.2349 - tversky: 0.8541 Epoch 00008: val_loss did not improve from 0.26843 72/72 [==============================] - 210s 3s/step - loss: 0.2349 - tversky: 0.8541 - val_loss: 0.2881 - val_tversky: 0.8084 - lr: 0.0500 Epoch 9/60 72/72 [==============================] - ETA: 0s - loss: 0.2205 - tversky: 0.8657 Epoch 00009: val_loss improved from 0.26843 to 0.25063, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 215s 3s/step - loss: 0.2205 - tversky: 0.8657 - val_loss: 0.2506 - val_tversky: 0.8411 - lr: 0.0500 Epoch 10/60 72/72 [==============================] - ETA: 0s - loss: 0.2102 - tversky: 0.8743 Epoch 00010: val_loss improved from 0.25063 to 0.23099, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 210s 3s/step - loss: 0.2102 - tversky: 0.8743 - val_loss: 0.2310 - val_tversky: 0.8577 - lr: 0.0500 Epoch 11/60 72/72 [==============================] - ETA: 0s - loss: 0.2040 - tversky: 0.8788 Epoch 00011: val_loss did not improve from 0.23099 72/72 [==============================] - 209s 3s/step - loss: 0.2040 - tversky: 0.8788 - val_loss: 0.2318 - val_tversky: 0.8566 - lr: 0.0500 Epoch 12/60 72/72 [==============================] - ETA: 0s - loss: 0.1938 - tversky: 0.8868 Epoch 00012: val_loss did not improve from 0.23099 72/72 [==============================] - 209s 3s/step - loss: 0.1938 - tversky: 0.8868 - val_loss: 0.2992 - val_tversky: 0.7960 - lr: 0.0500 Epoch 13/60 72/72 [==============================] - ETA: 0s - loss: 0.1877 - tversky: 0.8919 Epoch 00013: val_loss improved from 0.23099 to 0.21617, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 204s 3s/step - loss: 0.1877 - tversky: 0.8919 - val_loss: 0.2162 - val_tversky: 0.8697 - lr: 0.0500 Epoch 14/60 72/72 [==============================] - ETA: 0s - loss: 0.1886 - tversky: 0.8910 Epoch 00014: val_loss did not improve from 0.21617 
72/72 [==============================] - 204s 3s/step - loss: 0.1886 - tversky: 0.8910 - val_loss: 0.2312 - val_tversky: 0.8569 - lr: 0.0500 Epoch 15/60 72/72 [==============================] - ETA: 0s - loss: 0.1698 - tversky: 0.9054 Epoch 00015: val_loss improved from 0.21617 to 0.19208, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 205s 3s/step - loss: 0.1698 - tversky: 0.9054 - val_loss: 0.1921 - val_tversky: 0.8888 - lr: 0.0500 Epoch 16/60 72/72 [==============================] - ETA: 0s - loss: 0.1683 - tversky: 0.9064 Epoch 00016: val_loss improved from 0.19208 to 0.18824, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 207s 3s/step - loss: 0.1683 - tversky: 0.9064 - val_loss: 0.1882 - val_tversky: 0.8914 - lr: 0.0500 Epoch 17/60 72/72 [==============================] - ETA: 0s - loss: 0.1581 - tversky: 0.9140 Epoch 00017: val_loss did not improve from 0.18824 72/72 [==============================] - 207s 3s/step - loss: 0.1581 - tversky: 0.9140 - val_loss: 0.2038 - val_tversky: 0.8795 - lr: 0.0500 Epoch 18/60 72/72 [==============================] - ETA: 0s - loss: 0.1530 - tversky: 0.9177 Epoch 00018: val_loss improved from 0.18824 to 0.18632, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 208s 3s/step - loss: 0.1530 - tversky: 0.9177 - val_loss: 0.1863 - val_tversky: 0.8930 - lr: 0.0500 Epoch 19/60 72/72 [==============================] - ETA: 0s - loss: 0.1497 - tversky: 0.9201 Epoch 00019: val_loss improved from 0.18632 to 0.18138, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 206s 3s/step - loss: 0.1497 - tversky: 0.9201 - val_loss: 0.1814 - val_tversky: 0.8972 - lr: 0.0500 Epoch 20/60 72/72 [==============================] - ETA: 0s - loss: 0.1493 - tversky: 0.9204 Epoch 00020: val_loss improved from 0.18138 to 0.17366, saving model to ResUNet-segModel-weights.hdf5 72/72 
[==============================] - 205s 3s/step - loss: 0.1493 - tversky: 0.9204 - val_loss: 0.1737 - val_tversky: 0.9029 - lr: 0.0500 Epoch 21/60 72/72 [==============================] - ETA: 0s - loss: 0.1434 - tversky: 0.9245 Epoch 00021: val_loss did not improve from 0.17366 72/72 [==============================] - 206s 3s/step - loss: 0.1434 - tversky: 0.9245 - val_loss: 0.1925 - val_tversky: 0.8880 - lr: 0.0500 Epoch 22/60 72/72 [==============================] - ETA: 0s - loss: 0.1457 - tversky: 0.9228 Epoch 00022: val_loss did not improve from 0.17366 72/72 [==============================] - 206s 3s/step - loss: 0.1457 - tversky: 0.9228 - val_loss: 0.1815 - val_tversky: 0.8968 - lr: 0.0500 Epoch 23/60 72/72 [==============================] - ETA: 0s - loss: 0.1394 - tversky: 0.9273 Epoch 00023: val_loss did not improve from 0.17366 72/72 [==============================] - 206s 3s/step - loss: 0.1394 - tversky: 0.9273 - val_loss: 0.1854 - val_tversky: 0.8938 - lr: 0.0500 Epoch 24/60 72/72 [==============================] - ETA: 0s - loss: 0.1321 - tversky: 0.9324 Epoch 00024: val_loss improved from 0.17366 to 0.16542, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 204s 3s/step - loss: 0.1321 - tversky: 0.9324 - val_loss: 0.1654 - val_tversky: 0.9088 - lr: 0.0500 Epoch 25/60 72/72 [==============================] - ETA: 0s - loss: 0.1273 - tversky: 0.9357 Epoch 00025: val_loss did not improve from 0.16542 72/72 [==============================] - 205s 3s/step - loss: 0.1273 - tversky: 0.9357 - val_loss: 0.1728 - val_tversky: 0.9036 - lr: 0.0500 Epoch 26/60 72/72 [==============================] - ETA: 0s - loss: 0.1250 - tversky: 0.9373 Epoch 00026: val_loss did not improve from 0.16542 72/72 [==============================] - 207s 3s/step - loss: 0.1250 - tversky: 0.9373 - val_loss: 0.1799 - val_tversky: 0.8982 - lr: 0.0500 Epoch 27/60 72/72 [==============================] - ETA: 0s - loss: 0.1226 - tversky: 0.9388 
Epoch 00027: val_loss did not improve from 0.16542 72/72 [==============================] - 205s 3s/step - loss: 0.1226 - tversky: 0.9388 - val_loss: 0.1835 - val_tversky: 0.8949 - lr: 0.0500 Epoch 28/60 72/72 [==============================] - ETA: 0s - loss: 0.1292 - tversky: 0.9344 Epoch 00028: val_loss did not improve from 0.16542 72/72 [==============================] - 207s 3s/step - loss: 0.1292 - tversky: 0.9344 - val_loss: 0.1730 - val_tversky: 0.9025 - lr: 0.0500 Epoch 29/60 72/72 [==============================] - ETA: 0s - loss: 0.1199 - tversky: 0.9406 Epoch 00029: val_loss improved from 0.16542 to 0.16378, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 206s 3s/step - loss: 0.1199 - tversky: 0.9406 - val_loss: 0.1638 - val_tversky: 0.9100 - lr: 0.0500 Epoch 30/60 72/72 [==============================] - ETA: 0s - loss: 0.1169 - tversky: 0.9425 Epoch 00030: val_loss improved from 0.16378 to 0.15314, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 209s 3s/step - loss: 0.1169 - tversky: 0.9425 - val_loss: 0.1531 - val_tversky: 0.9178 - lr: 0.0500 Epoch 31/60 72/72 [==============================] - ETA: 0s - loss: 0.1188 - tversky: 0.9413 Epoch 00031: val_loss did not improve from 0.15314 72/72 [==============================] - 205s 3s/step - loss: 0.1188 - tversky: 0.9413 - val_loss: 0.1826 - val_tversky: 0.8940 - lr: 0.0500 Epoch 32/60 72/72 [==============================] - ETA: 0s - loss: 0.1111 - tversky: 0.9463 Epoch 00032: val_loss did not improve from 0.15314 72/72 [==============================] - 205s 3s/step - loss: 0.1111 - tversky: 0.9463 - val_loss: 0.1721 - val_tversky: 0.9040 - lr: 0.0500 Epoch 33/60 72/72 [==============================] - ETA: 0s - loss: 0.1099 - tversky: 0.9471 Epoch 00033: val_loss did not improve from 0.15314 72/72 [==============================] - 205s 3s/step - loss: 0.1099 - tversky: 0.9471 - val_loss: 0.1637 - val_tversky: 0.9101 - 
lr: 0.0500 Epoch 34/60 72/72 [==============================] - ETA: 0s - loss: 0.1078 - tversky: 0.9485 Epoch 00034: val_loss did not improve from 0.15314 72/72 [==============================] - 204s 3s/step - loss: 0.1078 - tversky: 0.9485 - val_loss: 0.1841 - val_tversky: 0.8945 - lr: 0.0500 Epoch 35/60 72/72 [==============================] - ETA: 0s - loss: 0.1075 - tversky: 0.9485 Epoch 00035: val_loss did not improve from 0.15314 72/72 [==============================] - 207s 3s/step - loss: 0.1075 - tversky: 0.9485 - val_loss: 0.1794 - val_tversky: 0.8981 - lr: 0.0500 Epoch 36/60 72/72 [==============================] - ETA: 0s - loss: 0.1020 - tversky: 0.9521 Epoch 00036: val_loss did not improve from 0.15314 72/72 [==============================] - 206s 3s/step - loss: 0.1020 - tversky: 0.9521 - val_loss: 0.1653 - val_tversky: 0.9090 - lr: 0.0500 Epoch 37/60 72/72 [==============================] - ETA: 0s - loss: 0.1007 - tversky: 0.9529 Epoch 00037: val_loss did not improve from 0.15314 72/72 [==============================] - 205s 3s/step - loss: 0.1007 - tversky: 0.9529 - val_loss: 0.1722 - val_tversky: 0.9033 - lr: 0.0500 Epoch 38/60 72/72 [==============================] - ETA: 0s - loss: 0.1009 - tversky: 0.9529 Epoch 00038: val_loss did not improve from 0.15314 72/72 [==============================] - 205s 3s/step - loss: 0.1009 - tversky: 0.9529 - val_loss: 0.1736 - val_tversky: 0.9020 - lr: 0.0500 Epoch 39/60 72/72 [==============================] - ETA: 0s - loss: 0.0978 - tversky: 0.9548 Epoch 00039: val_loss improved from 0.15314 to 0.15117, saving model to ResUNet-segModel-weights.hdf5 72/72 [==============================] - 207s 3s/step - loss: 0.0978 - tversky: 0.9548 - val_loss: 0.1512 - val_tversky: 0.9191 - lr: 0.0500 Epoch 40/60 72/72 [==============================] - ETA: 0s - loss: 0.0947 - tversky: 0.9567 Epoch 00040: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0947 - 
tversky: 0.9567 - val_loss: 0.1689 - val_tversky: 0.9061 - lr: 0.0500 Epoch 41/60 72/72 [==============================] - ETA: 0s - loss: 0.0986 - tversky: 0.9543 Epoch 00041: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0986 - tversky: 0.9543 - val_loss: 0.1593 - val_tversky: 0.9133 - lr: 0.0500 Epoch 42/60 72/72 [==============================] - ETA: 0s - loss: 0.0941 - tversky: 0.9571 Epoch 00042: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0941 - tversky: 0.9571 - val_loss: 0.1679 - val_tversky: 0.9066 - lr: 0.0500 Epoch 43/60 72/72 [==============================] - ETA: 0s - loss: 0.0924 - tversky: 0.9581 Epoch 00043: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0924 - tversky: 0.9581 - val_loss: 0.2040 - val_tversky: 0.8797 - lr: 0.0500 Epoch 44/60 72/72 [==============================] - ETA: 0s - loss: 0.0959 - tversky: 0.9559 Epoch 00044: val_loss did not improve from 0.15117 72/72 [==============================] - 209s 3s/step - loss: 0.0959 - tversky: 0.9559 - val_loss: 0.1651 - val_tversky: 0.9088 - lr: 0.0500 Epoch 45/60 72/72 [==============================] - ETA: 0s - loss: 0.0932 - tversky: 0.9576 Epoch 00045: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0932 - tversky: 0.9576 - val_loss: 0.1518 - val_tversky: 0.9185 - lr: 0.0500 Epoch 46/60 72/72 [==============================] - ETA: 0s - loss: 0.0912 - tversky: 0.9588 Epoch 00046: val_loss did not improve from 0.15117 72/72 [==============================] - 205s 3s/step - loss: 0.0912 - tversky: 0.9588 - val_loss: 0.1596 - val_tversky: 0.9130 - lr: 0.0500 Epoch 47/60 72/72 [==============================] - ETA: 0s - loss: 0.0967 - tversky: 0.9553 Epoch 00047: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0967 
- tversky: 0.9553 - val_loss: 0.1852 - val_tversky: 0.8941 - lr: 0.0500 Epoch 48/60 72/72 [==============================] - ETA: 0s - loss: 0.0937 - tversky: 0.9573 Epoch 00048: val_loss did not improve from 0.15117 72/72 [==============================] - 208s 3s/step - loss: 0.0937 - tversky: 0.9573 - val_loss: 0.1543 - val_tversky: 0.9172 - lr: 0.0500 Epoch 49/60 72/72 [==============================] - ETA: 0s - loss: 0.0878 - tversky: 0.9609 Epoch 00049: val_loss did not improve from 0.15117 Epoch 00049: ReduceLROnPlateau reducing learning rate to 0.010000000149011612. 72/72 [==============================] - 206s 3s/step - loss: 0.0878 - tversky: 0.9609 - val_loss: 0.1729 - val_tversky: 0.9034 - lr: 0.0500 Epoch 50/60 72/72 [==============================] - ETA: 0s - loss: 0.0832 - tversky: 0.9636 Epoch 00050: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0832 - tversky: 0.9636 - val_loss: 0.1601 - val_tversky: 0.9123 - lr: 0.0100 Epoch 51/60 72/72 [==============================] - ETA: 0s - loss: 0.0797 - tversky: 0.9656 Epoch 00051: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0797 - tversky: 0.9656 - val_loss: 0.1675 - val_tversky: 0.9068 - lr: 0.0100 Epoch 52/60 72/72 [==============================] - ETA: 0s - loss: 0.0789 - tversky: 0.9660 Epoch 00052: val_loss did not improve from 0.15117 72/72 [==============================] - 205s 3s/step - loss: 0.0789 - tversky: 0.9660 - val_loss: 0.1665 - val_tversky: 0.9073 - lr: 0.0100 Epoch 53/60 72/72 [==============================] - ETA: 0s - loss: 0.0778 - tversky: 0.9667 Epoch 00053: val_loss did not improve from 0.15117 72/72 [==============================] - 208s 3s/step - loss: 0.0778 - tversky: 0.9667 - val_loss: 0.1592 - val_tversky: 0.9135 - lr: 0.0100 Epoch 54/60 72/72 [==============================] - ETA: 0s - loss: 0.0769 - tversky: 0.9672 Epoch 00054: val_loss did not improve 
from 0.15117 72/72 [==============================] - 209s 3s/step - loss: 0.0769 - tversky: 0.9672 - val_loss: 0.1615 - val_tversky: 0.9118 - lr: 0.0100 Epoch 55/60 72/72 [==============================] - ETA: 0s - loss: 0.0771 - tversky: 0.9670 Epoch 00055: val_loss did not improve from 0.15117 72/72 [==============================] - 208s 3s/step - loss: 0.0771 - tversky: 0.9670 - val_loss: 0.1550 - val_tversky: 0.9163 - lr: 0.0100 Epoch 56/60 72/72 [==============================] - ETA: 0s - loss: 0.0762 - tversky: 0.9676 Epoch 00056: val_loss did not improve from 0.15117 72/72 [==============================] - 206s 3s/step - loss: 0.0762 - tversky: 0.9676 - val_loss: 0.1625 - val_tversky: 0.9111 - lr: 0.0100 Epoch 57/60 72/72 [==============================] - ETA: 0s - loss: 0.0757 - tversky: 0.9679 Epoch 00057: val_loss did not improve from 0.15117 72/72 [==============================] - 209s 3s/step - loss: 0.0757 - tversky: 0.9679 - val_loss: 0.1559 - val_tversky: 0.9158 - lr: 0.0100 Epoch 58/60 72/72 [==============================] - ETA: 0s - loss: 0.0747 - tversky: 0.9685 Epoch 00058: val_loss did not improve from 0.15117 72/72 [==============================] - 209s 3s/step - loss: 0.0747 - tversky: 0.9685 - val_loss: 0.1624 - val_tversky: 0.9109 - lr: 0.0100 Epoch 59/60 72/72 [==============================] - ETA: 0s - loss: 0.0749 - tversky: 0.9684 Epoch 00059: val_loss did not improve from 0.15117 Epoch 00059: ReduceLROnPlateau reducing learning rate to 0.0019999999552965165. 72/72 [==============================] - 207s 3s/step - loss: 0.0749 - tversky: 0.9684 - val_loss: 0.1539 - val_tversky: 0.9172 - lr: 0.0100 Epoch 00059: early stopping
# Persist the ResUNet segmentation model's architecture as JSON.
# Weights are saved separately by the ModelCheckpoint callback
# ("ResUNet-segModel-weights.hdf5"), so architecture + weights together
# allow a full reload later.
seg_model_json = seg_model.to_json()
with open("ResUNet-seg-model.json", "w") as json_file:
    json_file.write(seg_model_json)
# Inspect which series the Keras History object recorded during training
# (per the cell output below: loss, tversky, val_loss, val_tversky, lr).
h.history.keys()
dict_keys(['loss', 'tversky', 'val_loss', 'val_tversky', 'lr'])
# Plot the segmentation model's training curves side by side:
# focal-tversky loss on the left, tversky score on the right.
plt.figure(figsize=(12, 5))
curve_specs = [
    ('loss', 'val_loss', "SEG Model focal tversky Loss", "focal tversky loss"),
    ('tversky', 'val_tversky', "SEG Model tversky score", "tversky Accuracy"),
]
for panel, (train_key, val_key, title, y_label) in enumerate(curve_specs, start=1):
    plt.subplot(1, 2, panel)
    plt.plot(h.history[train_key])
    plt.plot(h.history[val_key])
    plt.title(title)
    plt.ylabel(y_label)
    plt.xlabel("Epochs")
    plt.legend(['train', 'val'])
# Rebuild the ResNet classifier's architecture from its saved JSON
# description (weights are loaded in a later cell).
with open('clf-resnet-model.json', 'r') as json_file:
    json_savedModel = json_file.read()
model = tf.keras.models.model_from_json(json_savedModel)
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 256, 256, 3 0 []
)]
conv1_pad (ZeroPadding2D) (None, 262, 262, 3) 0 ['input_1[0][0]']
conv1_conv (Conv2D) (None, 128, 128, 64 9472 ['conv1_pad[0][0]']
)
conv1_bn (BatchNormalization) (None, 128, 128, 64 256 ['conv1_conv[0][0]']
)
conv1_relu (Activation) (None, 128, 128, 64 0 ['conv1_bn[0][0]']
)
pool1_pad (ZeroPadding2D) (None, 130, 130, 64 0 ['conv1_relu[0][0]']
)
pool1_pool (MaxPooling2D) (None, 64, 64, 64) 0 ['pool1_pad[0][0]']
conv2_block1_1_conv (Conv2D) (None, 64, 64, 64) 4160 ['pool1_pool[0][0]']
conv2_block1_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_1_conv[0][0]']
ization)
conv2_block1_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_1_bn[0][0]']
n)
conv2_block1_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block1_1_relu[0][0]']
conv2_block1_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_2_conv[0][0]']
ization)
conv2_block1_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_2_bn[0][0]']
n)
conv2_block1_0_conv (Conv2D) (None, 64, 64, 256) 16640 ['pool1_pool[0][0]']
conv2_block1_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block1_2_relu[0][0]']
conv2_block1_0_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_0_conv[0][0]']
ization)
conv2_block1_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_3_conv[0][0]']
ization)
conv2_block1_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_0_bn[0][0]',
'conv2_block1_3_bn[0][0]']
conv2_block1_out (Activation) (None, 64, 64, 256) 0 ['conv2_block1_add[0][0]']
conv2_block2_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block1_out[0][0]']
conv2_block2_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_1_conv[0][0]']
ization)
conv2_block2_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_1_bn[0][0]']
n)
conv2_block2_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block2_1_relu[0][0]']
conv2_block2_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_2_conv[0][0]']
ization)
conv2_block2_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_2_bn[0][0]']
n)
conv2_block2_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block2_2_relu[0][0]']
conv2_block2_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block2_3_conv[0][0]']
ization)
conv2_block2_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_out[0][0]',
'conv2_block2_3_bn[0][0]']
conv2_block2_out (Activation) (None, 64, 64, 256) 0 ['conv2_block2_add[0][0]']
conv2_block3_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block2_out[0][0]']
conv2_block3_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_1_conv[0][0]']
ization)
conv2_block3_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_1_bn[0][0]']
n)
conv2_block3_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block3_1_relu[0][0]']
conv2_block3_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_2_conv[0][0]']
ization)
conv2_block3_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_2_bn[0][0]']
n)
conv2_block3_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block3_2_relu[0][0]']
conv2_block3_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block3_3_conv[0][0]']
ization)
conv2_block3_add (Add) (None, 64, 64, 256) 0 ['conv2_block2_out[0][0]',
'conv2_block3_3_bn[0][0]']
conv2_block3_out (Activation) (None, 64, 64, 256) 0 ['conv2_block3_add[0][0]']
conv3_block1_1_conv (Conv2D) (None, 32, 32, 128) 32896 ['conv2_block3_out[0][0]']
conv3_block1_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_1_conv[0][0]']
ization)
conv3_block1_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_1_bn[0][0]']
n)
conv3_block1_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block1_1_relu[0][0]']
conv3_block1_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_2_conv[0][0]']
ization)
conv3_block1_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_2_bn[0][0]']
n)
conv3_block1_0_conv (Conv2D) (None, 32, 32, 512) 131584 ['conv2_block3_out[0][0]']
conv3_block1_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block1_2_relu[0][0]']
conv3_block1_0_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_0_conv[0][0]']
ization)
conv3_block1_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_3_conv[0][0]']
ization)
conv3_block1_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_0_bn[0][0]',
'conv3_block1_3_bn[0][0]']
conv3_block1_out (Activation) (None, 32, 32, 512) 0 ['conv3_block1_add[0][0]']
conv3_block2_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block1_out[0][0]']
conv3_block2_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_1_conv[0][0]']
ization)
conv3_block2_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_1_bn[0][0]']
n)
conv3_block2_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block2_1_relu[0][0]']
conv3_block2_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_2_conv[0][0]']
ization)
conv3_block2_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_2_bn[0][0]']
n)
conv3_block2_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block2_2_relu[0][0]']
conv3_block2_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block2_3_conv[0][0]']
ization)
conv3_block2_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_out[0][0]',
'conv3_block2_3_bn[0][0]']
conv3_block2_out (Activation) (None, 32, 32, 512) 0 ['conv3_block2_add[0][0]']
conv3_block3_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block2_out[0][0]']
conv3_block3_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_1_conv[0][0]']
ization)
conv3_block3_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_1_bn[0][0]']
n)
conv3_block3_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block3_1_relu[0][0]']
conv3_block3_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_2_conv[0][0]']
ization)
conv3_block3_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_2_bn[0][0]']
n)
conv3_block3_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block3_2_relu[0][0]']
conv3_block3_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block3_3_conv[0][0]']
ization)
conv3_block3_add (Add) (None, 32, 32, 512) 0 ['conv3_block2_out[0][0]',
'conv3_block3_3_bn[0][0]']
conv3_block3_out (Activation) (None, 32, 32, 512) 0 ['conv3_block3_add[0][0]']
conv3_block4_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block3_out[0][0]']
conv3_block4_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_1_conv[0][0]']
ization)
conv3_block4_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_1_bn[0][0]']
n)
conv3_block4_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block4_1_relu[0][0]']
conv3_block4_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_2_conv[0][0]']
ization)
conv3_block4_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_2_bn[0][0]']
n)
conv3_block4_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block4_2_relu[0][0]']
conv3_block4_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block4_3_conv[0][0]']
ization)
conv3_block4_add (Add) (None, 32, 32, 512) 0 ['conv3_block3_out[0][0]',
'conv3_block4_3_bn[0][0]']
conv3_block4_out (Activation) (None, 32, 32, 512) 0 ['conv3_block4_add[0][0]']
conv4_block1_1_conv (Conv2D) (None, 16, 16, 256) 131328 ['conv3_block4_out[0][0]']
conv4_block1_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_1_conv[0][0]']
ization)
conv4_block1_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_1_bn[0][0]']
n)
conv4_block1_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block1_1_relu[0][0]']
conv4_block1_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_2_conv[0][0]']
ization)
conv4_block1_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_2_bn[0][0]']
n)
conv4_block1_0_conv (Conv2D) (None, 16, 16, 1024 525312 ['conv3_block4_out[0][0]']
)
conv4_block1_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block1_2_relu[0][0]']
)
conv4_block1_0_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_0_conv[0][0]']
ization) )
conv4_block1_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_3_conv[0][0]']
ization) )
conv4_block1_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_0_bn[0][0]',
) 'conv4_block1_3_bn[0][0]']
conv4_block1_out (Activation) (None, 16, 16, 1024 0 ['conv4_block1_add[0][0]']
)
conv4_block2_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block1_out[0][0]']
conv4_block2_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_1_conv[0][0]']
ization)
conv4_block2_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_1_bn[0][0]']
n)
conv4_block2_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block2_1_relu[0][0]']
conv4_block2_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_2_conv[0][0]']
ization)
conv4_block2_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_2_bn[0][0]']
n)
conv4_block2_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block2_2_relu[0][0]']
)
conv4_block2_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block2_3_conv[0][0]']
ization) )
conv4_block2_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_out[0][0]',
) 'conv4_block2_3_bn[0][0]']
conv4_block2_out (Activation) (None, 16, 16, 1024 0 ['conv4_block2_add[0][0]']
)
conv4_block3_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block2_out[0][0]']
conv4_block3_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_1_conv[0][0]']
ization)
conv4_block3_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_1_bn[0][0]']
n)
conv4_block3_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block3_1_relu[0][0]']
conv4_block3_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_2_conv[0][0]']
ization)
conv4_block3_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_2_bn[0][0]']
n)
conv4_block3_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block3_2_relu[0][0]']
)
conv4_block3_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block3_3_conv[0][0]']
ization) )
conv4_block3_add (Add) (None, 16, 16, 1024 0 ['conv4_block2_out[0][0]',
) 'conv4_block3_3_bn[0][0]']
conv4_block3_out (Activation) (None, 16, 16, 1024 0 ['conv4_block3_add[0][0]']
)
conv4_block4_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block3_out[0][0]']
conv4_block4_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_1_conv[0][0]']
ization)
conv4_block4_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_1_bn[0][0]']
n)
conv4_block4_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block4_1_relu[0][0]']
conv4_block4_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_2_conv[0][0]']
ization)
conv4_block4_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_2_bn[0][0]']
n)
conv4_block4_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block4_2_relu[0][0]']
)
conv4_block4_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block4_3_conv[0][0]']
ization) )
conv4_block4_add (Add) (None, 16, 16, 1024 0 ['conv4_block3_out[0][0]',
) 'conv4_block4_3_bn[0][0]']
conv4_block4_out (Activation) (None, 16, 16, 1024 0 ['conv4_block4_add[0][0]']
)
conv4_block5_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block4_out[0][0]']
conv4_block5_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_1_conv[0][0]']
ization)
conv4_block5_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_1_bn[0][0]']
n)
conv4_block5_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block5_1_relu[0][0]']
conv4_block5_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_2_conv[0][0]']
ization)
conv4_block5_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_2_bn[0][0]']
n)
conv4_block5_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block5_2_relu[0][0]']
)
conv4_block5_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block5_3_conv[0][0]']
ization) )
conv4_block5_add (Add) (None, 16, 16, 1024 0 ['conv4_block4_out[0][0]',
) 'conv4_block5_3_bn[0][0]']
conv4_block5_out (Activation) (None, 16, 16, 1024 0 ['conv4_block5_add[0][0]']
)
conv4_block6_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block5_out[0][0]']
conv4_block6_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_1_conv[0][0]']
ization)
conv4_block6_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_1_bn[0][0]']
n)
conv4_block6_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block6_1_relu[0][0]']
conv4_block6_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_2_conv[0][0]']
ization)
conv4_block6_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_2_bn[0][0]']
n)
conv4_block6_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block6_2_relu[0][0]']
)
conv4_block6_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block6_3_conv[0][0]']
ization) )
conv4_block6_add (Add) (None, 16, 16, 1024 0 ['conv4_block5_out[0][0]',
) 'conv4_block6_3_bn[0][0]']
conv4_block6_out (Activation) (None, 16, 16, 1024 0 ['conv4_block6_add[0][0]']
)
conv5_block1_1_conv (Conv2D) (None, 8, 8, 512) 524800 ['conv4_block6_out[0][0]']
conv5_block1_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_1_conv[0][0]']
ization)
conv5_block1_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_1_bn[0][0]']
n)
conv5_block1_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block1_1_relu[0][0]']
conv5_block1_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_2_conv[0][0]']
ization)
conv5_block1_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_2_bn[0][0]']
n)
conv5_block1_0_conv (Conv2D) (None, 8, 8, 2048) 2099200 ['conv4_block6_out[0][0]']
conv5_block1_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block1_2_relu[0][0]']
conv5_block1_0_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_0_conv[0][0]']
ization)
conv5_block1_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_3_conv[0][0]']
ization)
conv5_block1_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_0_bn[0][0]',
'conv5_block1_3_bn[0][0]']
conv5_block1_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block1_add[0][0]']
conv5_block2_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block1_out[0][0]']
conv5_block2_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_1_conv[0][0]']
ization)
conv5_block2_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_1_bn[0][0]']
n)
conv5_block2_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block2_1_relu[0][0]']
conv5_block2_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_2_conv[0][0]']
ization)
conv5_block2_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_2_bn[0][0]']
n)
conv5_block2_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block2_2_relu[0][0]']
conv5_block2_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block2_3_conv[0][0]']
ization)
conv5_block2_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_out[0][0]',
'conv5_block2_3_bn[0][0]']
conv5_block2_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block2_add[0][0]']
conv5_block3_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block2_out[0][0]']
conv5_block3_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_1_conv[0][0]']
ization)
conv5_block3_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_1_bn[0][0]']
n)
conv5_block3_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block3_1_relu[0][0]']
conv5_block3_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_2_conv[0][0]']
ization)
conv5_block3_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_2_bn[0][0]']
n)
conv5_block3_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block3_2_relu[0][0]']
conv5_block3_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block3_3_conv[0][0]']
ization)
conv5_block3_add (Add) (None, 8, 8, 2048) 0 ['conv5_block2_out[0][0]',
'conv5_block3_3_bn[0][0]']
conv5_block3_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block3_add[0][0]']
average_pooling2d (AveragePool (None, 2, 2, 2048) 0 ['conv5_block3_out[0][0]']
ing2D)
Flatten (Flatten) (None, 8192) 0 ['average_pooling2d[0][0]']
dense (Dense) (None, 256) 2097408 ['Flatten[0][0]']
dropout (Dropout) (None, 256) 0 ['dense[0][0]']
dense_1 (Dense) (None, 256) 65792 ['dropout[0][0]']
dropout_1 (Dropout) (None, 256) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 2) 514 ['dropout_1[0][0]']
==================================================================================================
Total params: 25,751,426
Trainable params: 25,698,306
Non-trainable params: 53,120
__________________________________________________________________________________________________
# Restore the trained weights into the rebuilt classifier, then compile it
# so it can be evaluated/used for prediction.
model.load_weights('clf-resnet-weights.hdf5')
model.compile(
    loss='categorical_crossentropy',
    optimizer='adam',
    metrics=["accuracy"],
)
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 256, 256, 3 0 []
)]
conv1_pad (ZeroPadding2D) (None, 262, 262, 3) 0 ['input_1[0][0]']
conv1_conv (Conv2D) (None, 128, 128, 64 9472 ['conv1_pad[0][0]']
)
conv1_bn (BatchNormalization) (None, 128, 128, 64 256 ['conv1_conv[0][0]']
)
conv1_relu (Activation) (None, 128, 128, 64 0 ['conv1_bn[0][0]']
)
pool1_pad (ZeroPadding2D) (None, 130, 130, 64 0 ['conv1_relu[0][0]']
)
pool1_pool (MaxPooling2D) (None, 64, 64, 64) 0 ['pool1_pad[0][0]']
conv2_block1_1_conv (Conv2D) (None, 64, 64, 64) 4160 ['pool1_pool[0][0]']
conv2_block1_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_1_conv[0][0]']
ization)
conv2_block1_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_1_bn[0][0]']
n)
conv2_block1_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block1_1_relu[0][0]']
conv2_block1_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block1_2_conv[0][0]']
ization)
conv2_block1_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block1_2_bn[0][0]']
n)
conv2_block1_0_conv (Conv2D) (None, 64, 64, 256) 16640 ['pool1_pool[0][0]']
conv2_block1_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block1_2_relu[0][0]']
conv2_block1_0_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_0_conv[0][0]']
ization)
conv2_block1_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block1_3_conv[0][0]']
ization)
conv2_block1_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_0_bn[0][0]',
'conv2_block1_3_bn[0][0]']
conv2_block1_out (Activation) (None, 64, 64, 256) 0 ['conv2_block1_add[0][0]']
conv2_block2_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block1_out[0][0]']
conv2_block2_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_1_conv[0][0]']
ization)
conv2_block2_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_1_bn[0][0]']
n)
conv2_block2_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block2_1_relu[0][0]']
conv2_block2_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block2_2_conv[0][0]']
ization)
conv2_block2_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block2_2_bn[0][0]']
n)
conv2_block2_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block2_2_relu[0][0]']
conv2_block2_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block2_3_conv[0][0]']
ization)
conv2_block2_add (Add) (None, 64, 64, 256) 0 ['conv2_block1_out[0][0]',
'conv2_block2_3_bn[0][0]']
conv2_block2_out (Activation) (None, 64, 64, 256) 0 ['conv2_block2_add[0][0]']
conv2_block3_1_conv (Conv2D) (None, 64, 64, 64) 16448 ['conv2_block2_out[0][0]']
conv2_block3_1_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_1_conv[0][0]']
ization)
conv2_block3_1_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_1_bn[0][0]']
n)
conv2_block3_2_conv (Conv2D) (None, 64, 64, 64) 36928 ['conv2_block3_1_relu[0][0]']
conv2_block3_2_bn (BatchNormal (None, 64, 64, 64) 256 ['conv2_block3_2_conv[0][0]']
ization)
conv2_block3_2_relu (Activatio (None, 64, 64, 64) 0 ['conv2_block3_2_bn[0][0]']
n)
conv2_block3_3_conv (Conv2D) (None, 64, 64, 256) 16640 ['conv2_block3_2_relu[0][0]']
conv2_block3_3_bn (BatchNormal (None, 64, 64, 256) 1024 ['conv2_block3_3_conv[0][0]']
ization)
conv2_block3_add (Add) (None, 64, 64, 256) 0 ['conv2_block2_out[0][0]',
'conv2_block3_3_bn[0][0]']
conv2_block3_out (Activation) (None, 64, 64, 256) 0 ['conv2_block3_add[0][0]']
conv3_block1_1_conv (Conv2D) (None, 32, 32, 128) 32896 ['conv2_block3_out[0][0]']
conv3_block1_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_1_conv[0][0]']
ization)
conv3_block1_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_1_bn[0][0]']
n)
conv3_block1_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block1_1_relu[0][0]']
conv3_block1_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block1_2_conv[0][0]']
ization)
conv3_block1_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block1_2_bn[0][0]']
n)
conv3_block1_0_conv (Conv2D) (None, 32, 32, 512) 131584 ['conv2_block3_out[0][0]']
conv3_block1_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block1_2_relu[0][0]']
conv3_block1_0_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_0_conv[0][0]']
ization)
conv3_block1_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block1_3_conv[0][0]']
ization)
conv3_block1_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_0_bn[0][0]',
'conv3_block1_3_bn[0][0]']
conv3_block1_out (Activation) (None, 32, 32, 512) 0 ['conv3_block1_add[0][0]']
conv3_block2_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block1_out[0][0]']
conv3_block2_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_1_conv[0][0]']
ization)
conv3_block2_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_1_bn[0][0]']
n)
conv3_block2_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block2_1_relu[0][0]']
conv3_block2_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block2_2_conv[0][0]']
ization)
conv3_block2_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block2_2_bn[0][0]']
n)
conv3_block2_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block2_2_relu[0][0]']
conv3_block2_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block2_3_conv[0][0]']
ization)
conv3_block2_add (Add) (None, 32, 32, 512) 0 ['conv3_block1_out[0][0]',
'conv3_block2_3_bn[0][0]']
conv3_block2_out (Activation) (None, 32, 32, 512) 0 ['conv3_block2_add[0][0]']
conv3_block3_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block2_out[0][0]']
conv3_block3_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_1_conv[0][0]']
ization)
conv3_block3_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_1_bn[0][0]']
n)
conv3_block3_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block3_1_relu[0][0]']
conv3_block3_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block3_2_conv[0][0]']
ization)
conv3_block3_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block3_2_bn[0][0]']
n)
conv3_block3_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block3_2_relu[0][0]']
conv3_block3_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block3_3_conv[0][0]']
ization)
conv3_block3_add (Add) (None, 32, 32, 512) 0 ['conv3_block2_out[0][0]',
'conv3_block3_3_bn[0][0]']
conv3_block3_out (Activation) (None, 32, 32, 512) 0 ['conv3_block3_add[0][0]']
conv3_block4_1_conv (Conv2D) (None, 32, 32, 128) 65664 ['conv3_block3_out[0][0]']
conv3_block4_1_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_1_conv[0][0]']
ization)
conv3_block4_1_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_1_bn[0][0]']
n)
conv3_block4_2_conv (Conv2D) (None, 32, 32, 128) 147584 ['conv3_block4_1_relu[0][0]']
conv3_block4_2_bn (BatchNormal (None, 32, 32, 128) 512 ['conv3_block4_2_conv[0][0]']
ization)
conv3_block4_2_relu (Activatio (None, 32, 32, 128) 0 ['conv3_block4_2_bn[0][0]']
n)
conv3_block4_3_conv (Conv2D) (None, 32, 32, 512) 66048 ['conv3_block4_2_relu[0][0]']
conv3_block4_3_bn (BatchNormal (None, 32, 32, 512) 2048 ['conv3_block4_3_conv[0][0]']
ization)
conv3_block4_add (Add) (None, 32, 32, 512) 0 ['conv3_block3_out[0][0]',
'conv3_block4_3_bn[0][0]']
conv3_block4_out (Activation) (None, 32, 32, 512) 0 ['conv3_block4_add[0][0]']
conv4_block1_1_conv (Conv2D) (None, 16, 16, 256) 131328 ['conv3_block4_out[0][0]']
conv4_block1_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_1_conv[0][0]']
ization)
conv4_block1_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_1_bn[0][0]']
n)
conv4_block1_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block1_1_relu[0][0]']
conv4_block1_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block1_2_conv[0][0]']
ization)
conv4_block1_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block1_2_bn[0][0]']
n)
conv4_block1_0_conv (Conv2D) (None, 16, 16, 1024 525312 ['conv3_block4_out[0][0]']
)
conv4_block1_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block1_2_relu[0][0]']
)
conv4_block1_0_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_0_conv[0][0]']
ization) )
conv4_block1_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block1_3_conv[0][0]']
ization) )
conv4_block1_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_0_bn[0][0]',
) 'conv4_block1_3_bn[0][0]']
conv4_block1_out (Activation) (None, 16, 16, 1024 0 ['conv4_block1_add[0][0]']
)
conv4_block2_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block1_out[0][0]']
conv4_block2_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_1_conv[0][0]']
ization)
conv4_block2_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_1_bn[0][0]']
n)
conv4_block2_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block2_1_relu[0][0]']
conv4_block2_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block2_2_conv[0][0]']
ization)
conv4_block2_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block2_2_bn[0][0]']
n)
conv4_block2_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block2_2_relu[0][0]']
)
conv4_block2_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block2_3_conv[0][0]']
ization) )
conv4_block2_add (Add) (None, 16, 16, 1024 0 ['conv4_block1_out[0][0]',
) 'conv4_block2_3_bn[0][0]']
conv4_block2_out (Activation) (None, 16, 16, 1024 0 ['conv4_block2_add[0][0]']
)
conv4_block3_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block2_out[0][0]']
conv4_block3_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_1_conv[0][0]']
ization)
conv4_block3_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_1_bn[0][0]']
n)
conv4_block3_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block3_1_relu[0][0]']
conv4_block3_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block3_2_conv[0][0]']
ization)
conv4_block3_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block3_2_bn[0][0]']
n)
conv4_block3_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block3_2_relu[0][0]']
)
conv4_block3_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block3_3_conv[0][0]']
ization) )
conv4_block3_add (Add) (None, 16, 16, 1024 0 ['conv4_block2_out[0][0]',
) 'conv4_block3_3_bn[0][0]']
conv4_block3_out (Activation) (None, 16, 16, 1024 0 ['conv4_block3_add[0][0]']
)
conv4_block4_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block3_out[0][0]']
conv4_block4_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_1_conv[0][0]']
ization)
conv4_block4_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_1_bn[0][0]']
n)
conv4_block4_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block4_1_relu[0][0]']
conv4_block4_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block4_2_conv[0][0]']
ization)
conv4_block4_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block4_2_bn[0][0]']
n)
conv4_block4_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block4_2_relu[0][0]']
)
conv4_block4_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block4_3_conv[0][0]']
ization) )
conv4_block4_add (Add) (None, 16, 16, 1024 0 ['conv4_block3_out[0][0]',
) 'conv4_block4_3_bn[0][0]']
conv4_block4_out (Activation) (None, 16, 16, 1024 0 ['conv4_block4_add[0][0]']
)
conv4_block5_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block4_out[0][0]']
conv4_block5_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_1_conv[0][0]']
ization)
conv4_block5_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_1_bn[0][0]']
n)
conv4_block5_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block5_1_relu[0][0]']
conv4_block5_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block5_2_conv[0][0]']
ization)
conv4_block5_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block5_2_bn[0][0]']
n)
conv4_block5_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block5_2_relu[0][0]']
)
conv4_block5_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block5_3_conv[0][0]']
ization) )
conv4_block5_add (Add) (None, 16, 16, 1024 0 ['conv4_block4_out[0][0]',
) 'conv4_block5_3_bn[0][0]']
conv4_block5_out (Activation) (None, 16, 16, 1024 0 ['conv4_block5_add[0][0]']
)
conv4_block6_1_conv (Conv2D) (None, 16, 16, 256) 262400 ['conv4_block5_out[0][0]']
conv4_block6_1_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_1_conv[0][0]']
ization)
conv4_block6_1_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_1_bn[0][0]']
n)
conv4_block6_2_conv (Conv2D) (None, 16, 16, 256) 590080 ['conv4_block6_1_relu[0][0]']
conv4_block6_2_bn (BatchNormal (None, 16, 16, 256) 1024 ['conv4_block6_2_conv[0][0]']
ization)
conv4_block6_2_relu (Activatio (None, 16, 16, 256) 0 ['conv4_block6_2_bn[0][0]']
n)
conv4_block6_3_conv (Conv2D) (None, 16, 16, 1024 263168 ['conv4_block6_2_relu[0][0]']
)
conv4_block6_3_bn (BatchNormal (None, 16, 16, 1024 4096 ['conv4_block6_3_conv[0][0]']
ization) )
conv4_block6_add (Add) (None, 16, 16, 1024 0 ['conv4_block5_out[0][0]',
) 'conv4_block6_3_bn[0][0]']
conv4_block6_out (Activation) (None, 16, 16, 1024 0 ['conv4_block6_add[0][0]']
)
conv5_block1_1_conv (Conv2D) (None, 8, 8, 512) 524800 ['conv4_block6_out[0][0]']
conv5_block1_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_1_conv[0][0]']
ization)
conv5_block1_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_1_bn[0][0]']
n)
conv5_block1_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block1_1_relu[0][0]']
conv5_block1_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block1_2_conv[0][0]']
ization)
conv5_block1_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block1_2_bn[0][0]']
n)
conv5_block1_0_conv (Conv2D) (None, 8, 8, 2048) 2099200 ['conv4_block6_out[0][0]']
conv5_block1_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block1_2_relu[0][0]']
conv5_block1_0_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_0_conv[0][0]']
ization)
conv5_block1_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block1_3_conv[0][0]']
ization)
conv5_block1_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_0_bn[0][0]',
'conv5_block1_3_bn[0][0]']
conv5_block1_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block1_add[0][0]']
conv5_block2_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block1_out[0][0]']
conv5_block2_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_1_conv[0][0]']
ization)
conv5_block2_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_1_bn[0][0]']
n)
conv5_block2_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block2_1_relu[0][0]']
conv5_block2_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block2_2_conv[0][0]']
ization)
conv5_block2_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block2_2_bn[0][0]']
n)
conv5_block2_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block2_2_relu[0][0]']
conv5_block2_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block2_3_conv[0][0]']
ization)
conv5_block2_add (Add) (None, 8, 8, 2048) 0 ['conv5_block1_out[0][0]',
'conv5_block2_3_bn[0][0]']
conv5_block2_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block2_add[0][0]']
conv5_block3_1_conv (Conv2D) (None, 8, 8, 512) 1049088 ['conv5_block2_out[0][0]']
conv5_block3_1_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_1_conv[0][0]']
ization)
conv5_block3_1_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_1_bn[0][0]']
n)
conv5_block3_2_conv (Conv2D) (None, 8, 8, 512) 2359808 ['conv5_block3_1_relu[0][0]']
conv5_block3_2_bn (BatchNormal (None, 8, 8, 512) 2048 ['conv5_block3_2_conv[0][0]']
ization)
conv5_block3_2_relu (Activatio (None, 8, 8, 512) 0 ['conv5_block3_2_bn[0][0]']
n)
conv5_block3_3_conv (Conv2D) (None, 8, 8, 2048) 1050624 ['conv5_block3_2_relu[0][0]']
conv5_block3_3_bn (BatchNormal (None, 8, 8, 2048) 8192 ['conv5_block3_3_conv[0][0]']
ization)
conv5_block3_add (Add) (None, 8, 8, 2048) 0 ['conv5_block2_out[0][0]',
'conv5_block3_3_bn[0][0]']
conv5_block3_out (Activation) (None, 8, 8, 2048) 0 ['conv5_block3_add[0][0]']
average_pooling2d (AveragePool (None, 2, 2, 2048) 0 ['conv5_block3_out[0][0]']
ing2D)
Flatten (Flatten) (None, 8192) 0 ['average_pooling2d[0][0]']
dense (Dense) (None, 256) 2097408 ['Flatten[0][0]']
dropout (Dropout) (None, 256) 0 ['dense[0][0]']
dense_1 (Dense) (None, 256) 65792 ['dropout[0][0]']
dropout_1 (Dropout) (None, 256) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 2) 514 ['dropout_1[0][0]']
==================================================================================================
Total params: 25,751,426
Trainable params: 25,698,306
Non-trainable params: 53,120
__________________________________________________________________________________________________
# Evaluate the tumor/no-tumor classifier on the held-out test generator.
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report

class_probs = model.predict(test_generator)
# predicted class = index of the highest softmax score
pred = np.argmax(class_probs, axis=1)
# ground-truth labels come from the 'mask' column of the test dataframe
original = np.asarray(test['mask']).astype('int')

accuracy = accuracy_score(original, pred)
print(accuracy)

cm = confusion_matrix(original, pred)
report = classification_report(original, pred, labels=[0, 1])
print(report)

# confusion matrix as an annotated heatmap
plt.figure(figsize=(5, 5))
sns.heatmap(cm, annot=True);
0.9745762711864406
precision recall f1-score support
0 0.97 0.99 0.98 375
1 0.99 0.94 0.96 215
accuracy 0.97 590
macro avg 0.98 0.97 0.97 590
weighted avg 0.97 0.97 0.97 590
# Evaluate the segmentation network on the same held-out split.
test_ids = X_test.image_path.tolist()
test_mask = X_test.mask_path.tolist()

# wrap paths in the project's generator so evaluate() can stream batches
test_data = DataGenerator(test_ids, test_mask)
_, tv = seg_model.evaluate(test_data)  # (loss, tversky metric)
print("Segmentation tversky is {:.2f}%".format(tv * 100))
6/6 [==============================] - 4s 587ms/step - loss: 0.1688 - tversky: 0.9064 Segmentation tversky is 90.64%
def prediction(test, model, model_seg):
    '''
    Two-stage tumor prediction over a dataframe of image paths.

    Each image in ``test.image_path`` is first run through the
    classification network ``model``. If the classifier's argmax is
    class 0 (no tumor), the image is recorded as mask-free and the
    segmentation step is skipped. Otherwise the image is standardised
    and passed to the segmentation network ``model_seg``; if the
    rounded predicted mask sums to zero the image is still recorded as
    mask-free, else the raw predicted mask array is stored.

    Parameters
    ----------
    test : pd.DataFrame
        Must contain an ``image_path`` column of readable image files.
    model : keras.Model
        Binary classifier; class index 0 means "no tumor".
    model_seg : keras.Model
        Segmentation network taking a (1, 256, 256, 3) batch.

    Returns
    -------
    pd.DataFrame
        Columns: ``image_path``, ``predicted_mask`` (raw mask array,
        or the string 'No mask :)'), and ``has_mask`` (0 or 1).
    '''
    # accumulators for the result dataframe
    mask, image_id, has_mask = [], [], []
    # iterating through each image in the test data
    for i in test.image_path:
        # --- stage 1: classification ---
        img = io.imread(i)
        # scale pixel values to [0, 1] (classifier's expected input range)
        img = img * 1. / 255.
        img = cv2.resize(img, (256, 256))
        img = np.array(img, dtype=np.float64)
        # add batch dimension: (256, 256, 3) -> (1, 256, 256, 3)
        img = np.reshape(img, (1, 256, 256, 3))
        is_defect = model.predict(img)
        # classifier predicts "no tumor" -> record and skip segmentation
        if np.argmax(is_defect) == 0:
            image_id.append(i)
            has_mask.append(0)
            mask.append('No mask :)')
            continue
        # --- stage 2: segmentation ---
        # batch of one RGB image, shape (1, 256, 256, 3)
        X = np.empty((1, 256, 256, 3))
        # re-read the original image (the scaled copy above was for the classifier)
        img = io.imread(i)
        img = cv2.resize(img, (256, 256))
        img = np.array(img, dtype=np.float64)
        # per-image standardisation (zero mean, unit variance) —
        # presumably matches the segmentation model's training preprocessing
        img -= img.mean()
        img /= img.std()
        X[0,] = img
        predict = model_seg.predict(X)
        if predict.round().astype(int).sum() == 0:
            # predicted mask is empty -> treat as no tumor
            image_id.append(i)
            has_mask.append(0)
            mask.append('No mask :)')
        else:
            # non-empty mask -> tumor found; keep the raw prediction
            image_id.append(i)
            has_mask.append(1)
            mask.append(predict)
    return pd.DataFrame({'image_path': image_id,
                         'predicted_mask': mask,
                         'has_mask': has_mask})
# making prediction
# run the two-stage (classify, then segment) pipeline over the whole test set
df_pred = prediction(test, model, seg_model)
df_pred
| image_path | predicted_mask | has_mask | |
|---|---|---|---|
| 0 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
| 1 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | [[[[4.6963205e-08], [5.0886354e-07], [1.121319... | 1 |
| 2 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
| 3 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
| 4 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | [[[[5.3284335e-08], [2.4559512e-07], [4.543847... | 1 |
| ... | ... | ... | ... |
| 585 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
| 586 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
| 587 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | [[[[3.895526e-08], [4.4806472e-07], [8.1248714... | 1 |
| 588 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
| 589 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | No mask :) | 0 |
590 rows × 3 columns
# merging original and prediction df
# join ground-truth columns (mask_path, mask) onto the predictions, keyed by image_path
df_pred = test.merge(df_pred, on='image_path')
df_pred.head(10)
| image_path | mask_path | mask | predicted_mask | has_mask | |
|---|---|---|---|---|---|
| 0 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 | No mask :) | 0 |
| 1 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 1 | [[[[4.6963205e-08], [5.0886354e-07], [1.121319... | 1 |
| 2 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 | No mask :) | 0 |
| 3 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 | No mask :) | 0 |
| 4 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 1 | [[[[5.3284335e-08], [2.4559512e-07], [4.543847... | 1 |
| 5 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 | No mask :) | 0 |
| 6 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 | No mask :) | 0 |
| 7 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 1 | [[[[5.2790874e-08], [2.507918e-07], [4.4831563... | 1 |
| 8 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 0 | No mask :) | 0 |
| 9 | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | ../desktop/lgg-mri-segmentation/kaggle_3m\TCGA... | 1 | [[[[4.43354e-08], [2.4883016e-07], [5.5490364e... | 1 |
#visualizing prediction
# Show up to 15 tumor-positive cases, one row each:
# MRI, ground-truth mask, predicted mask, and both overlays.
fig, axs = plt.subplots(15, 5, figsize=(30, 70))
count = 0
for i in range(len(df_pred)):
    # only visualize cases where the pipeline predicted a tumor
    if df_pred.has_mask[i] != 1:
        continue
    row = axs[count]
    # MRI scan (convert BGR -> RGB for display)
    img = cv2.cvtColor(io.imread(df_pred.image_path[i]), cv2.COLOR_BGR2RGB)
    row[0].imshow(img)
    row[0].title.set_text('Brain MRI')
    # ground-truth mask
    mask = io.imread(df_pred.mask_path[i])
    row[1].imshow(mask)
    row[1].title.set_text('Original Mask')
    # predicted mask, squeezed to 2-D and rounded to {0, 1}
    pred = np.array(df_pred.predicted_mask[i]).squeeze().round()
    row[2].imshow(pred)
    row[2].title.set_text('AI predicted mask')
    # paint the ground-truth mask in red on the MRI
    img[mask == 255] = (255, 0, 0)
    row[3].imshow(img)
    row[3].title.set_text('Brain MRI with original mask (Ground Truth)')
    # paint the predicted mask in green on a fresh copy of the MRI
    img_ = cv2.cvtColor(io.imread(df_pred.image_path[i]), cv2.COLOR_BGR2RGB)
    img_[pred == 1] = (0, 255, 150)
    row[4].imshow(img_)
    row[4].title.set_text('MRI with AI PREDICTED MASK')
    count += 1
    if count == 15:
        break
fig.tight_layout()